import os

import torch
import torch.nn as nn

from src.dataset import Multimodal_Datasets


def get_data(args, dataset, split='train'):
    """Return the requested Multimodal_Datasets split, caching it on disk after the first build."""
    data_path = os.path.join(args.data_path, dataset) + f'_{split}.dt'
    if not os.path.exists(data_path):
        print(f" - Creating new {split} data")
        data = Multimodal_Datasets(args.data_path, dataset, split)
        torch.save(data, data_path)
    else:
        print(f" - Found cached {split} data")
        data = torch.load(data_path)
    return data
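
# Example (illustrative; assumes args.data_path='data' and dataset='mosei_senti'):
# the first call builds and caches data/mosei_senti_train.dt, later calls reload it.
#   train_data = get_data(args, 'mosei_senti', 'train')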


def save_load_name(args, name=''):
    # Compose the checkpoint file name as <name>_<model type>.
    load_name = name + '_' + args.model
    return load_name


def save_model(args, model, name=''):
    if not os.path.exists('output/'):
        os.makedirs('output/')
    name = save_load_name(args, name)
    # Save the whole model object under the composed file name.
    torch.save(model, f'output/{name}.pt')


def load_model(args, name=''):
    name = save_load_name(args, name)
    # Load the model object written by save_model from the same composed file name.
    model = torch.load(f'output/{name}.pt')
    return model
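
# Usage sketch (hypothetical argparse namespace, for illustration only):
#   args = argparse.Namespace(model='MULT')
#   save_model(args, net, name='mosei')    # -> output/mosei_MULT.pt
#   net = load_model(args, name='mosei')   # reads the same file back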


class focalloss(nn.Module):
    """Multi-class focal loss: FL(pt) = -alpha_t * (1 - pt)^gamma * log(pt)."""

    def __init__(self, alpha=[0.1, 0.1, 0.8], gamma=3, reduction='mean'):
        super(focalloss, self).__init__()
        self.alpha = torch.tensor(alpha)   # per-class weights
        self.gamma = gamma                 # focusing parameter
        self.reduction = reduction

    def forward(self, pred, target):
        # remake_label (defined elsewhere in the project) maps raw targets to class indices.
        target = remake_label(target).type(torch.int64)
        alpha = self.alpha.to(pred.device)[target]
        log_softmax = torch.log_softmax(pred, dim=1)
        # Pick out the log-probability of each sample's true class.
        logpt = torch.gather(log_softmax, dim=1, index=target.view(-1, 1)).view(-1)
        ce_loss = -logpt
        pt = torch.exp(logpt)
        # Down-weight easy examples (pt close to 1) by (1 - pt)^gamma.
        focal_loss = alpha * (1 - pt) ** self.gamma * ce_loss
        if self.reduction == "mean":
            return torch.mean(focal_loss)
        if self.reduction == "sum":
            return torch.sum(focal_loss)
        return focal_loss
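

# Minimal usage sketch (illustrative only, not part of the training pipeline):
# it exercises the same focal-loss arithmetic as focalloss.forward on dummy
# logits, using already-indexed targets so it does not need remake_label.
if __name__ == "__main__":
    logits = torch.randn(4, 3)                     # batch of 4 samples, 3 classes
    labels = torch.tensor([0, 1, 2, 1])            # class indices
    alpha = torch.tensor([0.1, 0.1, 0.8])[labels]  # same per-class weights as focalloss
    logpt = torch.log_softmax(logits, dim=1).gather(1, labels.view(-1, 1)).view(-1)
    pt = logpt.exp()
    loss = (alpha * (1 - pt) ** 3 * (-logpt)).mean()
    print(f"focal loss on a dummy batch: {loss.item():.4f}")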