classifier.py
import os

import torch
from torch import nn
from torch import optim
from dotenv import load_dotenv

from processor import DataProcessor

load_dotenv()
# TODO: consider extending nn.Module directly instead of wrapping the model.
# TODO: use word2vec-style embeddings (nn.Embedding) as the input layer.
class SimulacrumClassifier:
    def __init__(self):
        # Load the train/test split for the configured simulacrum.
        processor = DataProcessor(os.getenv("SIMULACRUM_NAME"))
        self.train_X, self.test_X, self.train_y, self.test_y = processor.process()
        self.num_epoch = int(os.getenv("CLASSIFIER_NUM_EPOCH"))

        # Single-logit linear model over the 200-dim feature vectors.
        # Earlier embedding experiment, kept for reference:
        #   nn.Embedding(200, 1), nn.ReLU(), nn.MaxPool1d(1), nn.Flatten(),
        self.model = nn.Sequential(
            nn.Linear(200, 1)
        )
        self.loss = nn.BCEWithLogitsLoss()
        self.optimizer = optim.SGD(
            self.model.parameters(), lr=0.05, momentum=0.9, weight_decay=0.001
        )
    def train(self):
        # Full-batch gradient descent over the training split.
        # torch.autograd.Variable is deprecated; plain tensors suffice.
        train_X = torch.tensor(self.train_X, dtype=torch.float32)
        train_y = torch.unsqueeze(torch.tensor(self.train_y, dtype=torch.float32), 1)
        for epoch in range(self.num_epoch):
            y_predict = self.model(train_X)
            loss_value = self.loss(y_predict, train_y)
            self.optimizer.zero_grad()
            loss_value.backward()
            self.optimizer.step()
            if epoch % 50 == 0:
                print(f"Epoch {epoch} had training loss {loss_value.item():.4f}")
if __name__ == "__main__":
    classifier = SimulacrumClassifier()
    classifier.train()
    torch.save(classifier.model, "data/model.pt")
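
# Sketch (assumption, not part of the original file) of the embedding-based
# model the commented-out layers above hint at. It presumes inputs are integer
# token ids of length seq_len rather than the 200-dim float features the
# linear model consumes; vocab_size, embed_dim, and seq_len are placeholders.
def build_embedding_model(vocab_size=200, embed_dim=8, seq_len=200):
    return nn.Sequential(
        nn.Embedding(vocab_size, embed_dim),  # (batch, seq_len) -> (batch, seq_len, embed_dim)
        nn.Flatten(),                         # -> (batch, seq_len * embed_dim)
        nn.Linear(seq_len * embed_dim, 1),    # single logit for BCEWithLogitsLoss
    )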