
Commit cd5dadb

liammcgold authored and committed
Initial commit
0 parents  commit cd5dadb

File tree

6 files changed (+10094, -0 lines)


Dataloader.py

+71
@@ -0,0 +1,71 @@
from torch.utils import data
import os
from multiprocessing import Lock
import random
from PIL import Image
import numpy as np
import torch


class FolderDataset(data.Dataset):
    """Dataset over a folder of .jpg images whose filenames encode the age
    label as the first underscore-separated field (e.g. "23_xxx.jpg")."""

    def __init__(self, folder_loc, files=None):
        # Normalize the folder path so it always ends with a slash.
        if folder_loc[-1] != "/":
            self.loc = folder_loc + "/"
        else:
            self.loc = folder_loc
        if files is None:
            self.files = [str(x) for x in os.listdir(folder_loc) if x.endswith('.jpg')]
        else:
            self.files = files
        # One lock per file so concurrent readers don't open the same image at once.
        self.locks = [Lock() for _ in self.files]

        # Unseeded shuffle; ordering is randomized anew for each dataset instance.
        random.Random().shuffle(self.files)

    def __getitem__(self, index):
        file = self.files[index]
        # The age label is the leading integer of the filename.
        age = int(file.split("_")[0])
        with self.locks[index]:
            im = Image.open(self.loc + file)
            im = np.asarray(im)
        return (im, age)

    def __len__(self):
        return len(self.files)


class FolderDataloader(data.DataLoader):

    def __init__(self, dataset, collate_fn=None, num_workers=2, batch_size=1):
        super().__init__(dataset, collate_fn=collate_fn, num_workers=num_workers,
                         batch_size=batch_size, shuffle=True)


def collate(batch):
    # Stack the images (assumed to share one (H, W, C) shape) and move
    # channels first: (B, H, W, C) -> (B, C, H, W).
    images = np.asarray([x[0] for x in batch])
    labels = [x[1] for x in batch]
    images = np.einsum("bxyc->bcxy", images)

    return torch.tensor(images, dtype=torch.float), torch.tensor(labels, dtype=torch.uint8)


def generate_dataloaders(folder_loc, split=(0.6, 0.3, 0.1), collate_fxn=collate, num_workers=2, batch_size=1):
    """Split the folder's .jpg files into train/test/val loaders per `split`."""
    files = [str(x) for x in os.listdir(folder_loc) if x.endswith('.jpg')]
    # Seeded shuffle so the train/test/val split is reproducible across runs.
    random.Random(0).shuffle(files)
    split_a = int(len(files) * split[0])
    split_b = split_a + int(len(files) * split[1])

    train = files[0:split_a]
    test = files[split_a:split_b]
    val = files[split_b:]  # validation takes the remainder

    train_ds = FolderDataset(folder_loc, files=train)
    train_dl = FolderDataloader(train_ds, collate_fn=collate_fxn, num_workers=num_workers, batch_size=batch_size)
    test_ds = FolderDataset(folder_loc, files=test)
    test_dl = FolderDataloader(test_ds, collate_fn=collate_fxn, num_workers=num_workers, batch_size=batch_size)
    val_ds = FolderDataset(folder_loc, files=val)
    val_dl = FolderDataloader(val_ds, collate_fn=collate_fxn, num_workers=num_workers, batch_size=batch_size)

    return train_dl, test_dl, val_dl
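
The age label is parsed from the leading integer of each filename, which suggests a UTKFace-style naming scheme (age_gender_race_date.jpg). A minimal usage sketch, assuming a hypothetical ./faces/ folder of such files:

# Usage sketch. The "./faces/" folder is an assumption; any folder of
# same-sized .jpg files whose names start with "<age>_" will work.
from Dataloader import generate_dataloaders

train_dl, test_dl, val_dl = generate_dataloaders("./faces/", split=(0.6, 0.3, 0.1),
                                                 num_workers=2, batch_size=4)

for images, ages in train_dl:
    # images: float tensor of shape (B, C, H, W); ages: uint8 tensor of shape (B,)
    print(images.shape, ages)
    break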

Logging.py

+15
@@ -0,0 +1,15 @@
def log(folder, split, batch_norm, probabilistic, epochs, output_folder, lr, batch_size):
    """Write the run's hyperparameters to Description.txt in the output folder."""
    params = [str(lr),
              str(probabilistic),
              str(split),
              str(batch_norm),
              str(epochs),
              str(batch_size),
              str(folder),
              str(output_folder)]
    # Join the parameters into a single comma-separated record.
    record = ",".join(params)

    with open(output_folder + "Description.txt", "w") as file:
        file.write(record)
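
A short call sketch for log; every argument value below is an illustrative placeholder, not from the source:

# Hypothetical call; all values here are placeholders.
from Logging import log

log(folder="./faces/", split=(0.6, 0.3, 0.1), batch_norm=True, probabilistic=False,
    epochs=10, output_folder="./runs/exp1/", lr=1e-4, batch_size=4)
# Writes ./runs/exp1/Description.txt containing one comma-separated record.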

Losses.py

+12
@@ -0,0 +1,12 @@
from torch import distributions as d
from torch.nn import functional as F


def reg_loss(x, y):
    # Mean-squared error between predicted and true ages; assumes the
    # predictions and targets live on the GPU.
    target = y.float().unsqueeze(-1).to('cuda')
    return F.mse_loss(x, target)


def prob_loss(x, y):
    # Negative log-likelihood of the true age under a Normal distribution.
    # Indexing x[0] takes the first sample's (mean, std) outputs, so this is
    # written for batch_size=1, and the std output must be positive.
    n = d.Normal(x[0][0], x[0][1])
    return -1 * n.log_prob(y)
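
For reference, prob_loss computes a Gaussian negative log-likelihood. A standalone sketch (values are illustrative) checking it against the closed form -log N(y | mu, sigma) = log(sigma) + 0.5*log(2*pi) + (y - mu)^2 / (2*sigma^2):

# Standalone check of the Gaussian NLL that prob_loss computes.
import math
import torch
from torch import distributions as d

mu, sigma, y = torch.tensor(30.0), torch.tensor(5.0), torch.tensor(28.0)
nll = -d.Normal(mu, sigma).log_prob(y)
by_hand = torch.log(sigma) + 0.5 * math.log(2 * math.pi) + (y - mu) ** 2 / (2 * sigma ** 2)
assert torch.isclose(nll, by_hand)  # both come out to about 2.608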

Models.py

+88
@@ -0,0 +1,88 @@
from torch import nn


class Regress(nn.Module):
    """Convolutional age regressor; with probabilistic=True the head outputs a
    (mean, std) pair instead of a single value."""

    def __init__(self, probabilistic=False, in_chan=3, batch_norm=False):
        super(Regress, self).__init__()
        n_out = 1 + int(probabilistic)
        self.conv_blocks = nn.Sequential(
            ResMod(in_chan=in_chan, out_chan=in_chan, batch_norm=batch_norm),
            ResMod(in_chan=in_chan, out_chan=in_chan, batch_norm=batch_norm),
            nn.Conv2d(in_chan, 16, 3, stride=2),
            nn.Conv2d(16, 32, 3, stride=2),
            nn.Conv2d(32, 64, 3, stride=2),
            nn.Conv2d(64, 128, 3, stride=2),
        )
        # Expected input resolution per mode.
        if probabilistic:
            s = 210
        else:
            s = 190
        # Each stride-2, kernel-3, unpadded convolution maps a spatial size n
        # to (n - 3) // 2 + 1; track that exactly so the first Linear layer's
        # input size matches the flattened feature map (the original s // 2**4
        # approximation overestimates it and breaks the view/Linear handoff).
        adjustment = s
        for _ in range(4):
            adjustment = (adjustment - 3) // 2 + 1

        # Output layers
        self.dense_unit = nn.Sequential(nn.Linear(128 * adjustment ** 2, n_out * 16),
                                        nn.Linear(n_out * 16, n_out * 8),
                                        nn.Linear(n_out * 8, n_out * 4),
                                        nn.Linear(n_out * 4, n_out * 2),
                                        nn.Linear(n_out * 2, n_out))

        self.frozen = False

    def forward(self, img):
        out = self.conv_blocks(img)
        # Flatten the feature maps before the dense head.
        out = out.view(out.shape[0], -1)
        y = self.dense_unit(out)
        return y


class ResMod(nn.Module):
    """Two-convolution residual block with a 1x1 shortcut projection."""

    def __init__(self, batch_norm=False, in_chan=1, out_chan=1):
        super(ResMod, self).__init__()

        self.in_chan = in_chan
        self.out_chan = out_chan

        ###############
        # first block #
        ###############
        module = nn.ModuleList()

        module.append(nn.Conv2d(in_chan, out_chan, (3, 3), padding=1))

        if batch_norm:
            module.append(nn.BatchNorm2d(out_chan))

        module.append(nn.ReLU())

        self.first_block = module

        ################
        # second block #
        ################
        module = nn.ModuleList()

        module.append(nn.Conv2d(out_chan, out_chan, (3, 3), padding=1))

        if batch_norm:
            module.append(nn.BatchNorm2d(out_chan))

        self.second_block = module

        # 1x1 convolution so the residual matches the block's output channels.
        self.shortcut = nn.Conv2d(in_chan, out_chan, (1, 1))

        self.relu = nn.ReLU()

    def forward(self, x):
        residual = x

        for module in self.first_block:
            x = module(x)
        for module in self.second_block:
            x = module(x)

        x += self.shortcut(residual)

        x = self.relu(x)

        return x
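
A shape-check sketch for Regress; the 190x190 and 210x210 input sizes are inferred from the s constants above and are an assumption about the intended data:

# Shape-check sketch; input sizes inferred from Regress's s = 190 / s = 210 constants.
import torch
from Models import Regress

model = Regress(probabilistic=False, in_chan=3, batch_norm=True)
x = torch.randn(2, 3, 190, 190)   # batch of two 190x190 RGB images
print(model(x).shape)             # torch.Size([2, 1]) - one age per image

prob_model = Regress(probabilistic=True, in_chan=3, batch_norm=True)
x = torch.randn(2, 3, 210, 210)
print(prob_model(x).shape)        # torch.Size([2, 2]) - (mean, std) per image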
