[c3444c]: / pretrain / sampler_util.py

Download this file

54 lines (47 with data), 1.6 kB

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import torch
from torch.utils.data import Dataset, DataLoader
from torch.utils.data.sampler import Sampler, RandomSampler
"""
class TmpDataset(Dataset):
def __init__(self, m=10):
self.len = m
def __getitem__(self, index):
return (list(range(10)) * index, [0] * index)
def __len__(self):
return self.len
"""
class FixedLengthBatchSampler(Sampler):
    """Yield batches of dataset indices whose total weighted length stays under a cap.

    Unlike a fixed-size batch sampler, each batch is filled greedily until adding
    the next sample would push the running length past ``fixed_length``; the
    length of a sample is taken as 3x the length of its last field
    (presumably the label sequence — verify against the dataset).
    """

    def __init__(self, sampler, fixed_length, drop_last):
        """
        Args:
            sampler: an index sampler (e.g. RandomSampler) exposing ``data_source``.
            fixed_length: maximum total weighted sample length per batch.
            drop_last: if True, discard the final partially filled batch.
        """
        self.sampler = sampler
        self.fixed_length = fixed_length
        self.drop_last = drop_last
        # Running count of indices consumed from the underlying sampler.
        self.rel_sampler_count = 0

    def __iter__(self):
        batch = []
        now_length = 0
        for idx in self.sampler:
            # Weight each sample by 3x the length of its last field.
            sample_length = len(self.sampler.data_source[idx][-1]) * 3
            # BUGFIX: only flush a non-empty batch. Previously, a first sample
            # whose weighted length alone exceeded ``fixed_length`` caused an
            # empty batch to be yielded before anything was appended.
            if batch and now_length + sample_length > self.fixed_length:
                yield batch
                batch = []
                now_length = 0
            batch.append(idx)
            now_length += sample_length
            self.rel_sampler_count += 1
        # Emit the trailing partial batch unless the caller asked to drop it.
        if batch and not self.drop_last:
            yield batch
def my_collate_fn(batch):
    """Collate variable-length samples by concatenating each field across the batch.

    Every sample in ``batch`` is a tuple of equal arity whose elements are
    sequences of ints. For each field position, the sequences are flattened
    into one LongTensor; a field whose flattened length exceeds the total
    label count (the summed length of each sample's last field) is reshaped
    to ``(total, -1)``, otherwise it is kept 1-D.
    """
    # Total number of label positions across the batch.
    total = sum(len(sample[-1]) for sample in batch)
    fields = []
    # zip(*batch) groups the i-th element of every sample together.
    for field_values in zip(*batch):
        flat = [v for seq in field_values for v in seq]
        tensor = torch.LongTensor(flat)
        if len(flat) > total:
            tensor = tensor.reshape(total, -1)
        fields.append(tensor)
    return tuple(fields)