-
Notifications
You must be signed in to change notification settings - Fork 0
/
util.py
67 lines (57 loc) · 1.8 KB
/
util.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import torch
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torch.utils.data as data
import numpy as np
# Random sampler wrapper class
def InfiniteSampler(n):
    """Endlessly yield indices in [0, n) as repeated random permutations.

    NOTE(review): by construction, only the *last* entry of the very first
    permutation is emitted before reseeding; every later permutation is
    consumed in full. This quirk is intentional upstream behavior.
    """
    order = np.random.permutation(n)
    yield order[-1]
    while True:
        # Reseed from OS entropy so each worker/process diverges.
        np.random.seed()
        order = np.random.permutation(n)
        for index in order:
            yield index
class InfiniteSamplerWrapper(data.sampler.Sampler):
    """Sampler that draws dataset indices forever via ``InfiniteSampler``."""

    def __init__(self, data_source):
        # Only the dataset's size matters; the samples themselves are never read.
        self.num_samples = len(data_source)

    def __iter__(self):
        # A generator is its own iterator, so return it directly.
        return InfiniteSampler(self.num_samples)

    def __len__(self):
        # Effectively infinite: a DataLoader epoch never exhausts this sampler.
        return 2 ** 31
def get_dataloaders(data_dir, imsize, batch_size, eval_size, num_workers=1):
    r"""
    Create an infinite training dataloader and a finite evaluation dataloader
    from a directory of images laid out for ``ImageFolder``.

    Fix: the original shared one augmented transform (including
    ``RandomHorizontalFlip``) across both splits, so evaluation batches were
    randomly augmented. Here the eval split gets a deterministic transform.

    Args:
        data_dir: Root directory containing class subfolders of images.
        imsize: Target square size for resize + center crop.
        batch_size: Batch size for both loaders.
        eval_size: Number of samples held out for evaluation.
        num_workers: DataLoader worker processes (default 1).

    Returns:
        (train_dataloader, eval_dataloader) — the train loader uses
        ``InfiniteSamplerWrapper`` and never exhausts.
    """
    normalize = transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    train_transform = transforms.Compose(
        [
            transforms.Resize(imsize),
            transforms.CenterCrop(imsize),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalize,
        ]
    )
    # Eval transform omits the random flip so evaluation is deterministic.
    eval_transform = transforms.Compose(
        [
            transforms.Resize(imsize),
            transforms.CenterCrop(imsize),
            transforms.ToTensor(),
            normalize,
        ]
    )
    # Two views of the same files with different transforms; split by one
    # shared index permutation so the subsets remain disjoint.
    train_dataset = datasets.ImageFolder(root=data_dir, transform=train_transform)
    eval_dataset = datasets.ImageFolder(root=data_dir, transform=eval_transform)
    perm = torch.randperm(len(train_dataset)).tolist()
    eval_subset = data.Subset(eval_dataset, perm[:eval_size])
    train_subset = data.Subset(train_dataset, perm[eval_size:])
    eval_dataloader = data.DataLoader(
        eval_subset, batch_size=batch_size, num_workers=num_workers
    )
    train_dataloader = data.DataLoader(
        train_subset,
        batch_size=batch_size,
        shuffle=False,  # ordering is supplied by the sampler
        sampler=InfiniteSamplerWrapper(train_subset),
        num_workers=num_workers,
        pin_memory=True,
    )
    return train_dataloader, eval_dataloader