train.py
"""Train a NeuralNetwork on Fashion-MNIST or MNIST and log metrics to Weights & Biases."""
import sys
import pickle

from keras.datasets import fashion_mnist, mnist
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
import wandb

from neuralnetwork import NeuralNetwork
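
# Example invocation (a sketch, not the only valid form): the flag names mirror the
# parser in main(); the project/entity names and hyperparameter values shown here are
# placeholders.
#
#   python train.py --wandb_project my_project --wandb_entity my_entity \
#       --dataset fashion_mnist --epochs 10 --batch_size 64 --optimizer adam \
#       --learning_rate 0.001 --num_layers 3 --hidden_size 128 --activation relu
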
def main(argv):
    opts = []
    args = []
    params = {}
    project_name = ''
    entity_name = ''
    dataset = 'fashion_mnist'
    n_epochs = 10
    run_name = 'model_run'
    # argv is expected to alternate option, value, option, value, ...
    for i in range(0, len(argv)):
        if i % 2 == 0:
            opts.append(argv[i])
        else:
            args.append(argv[i])
    # Map each recognized option onto the NeuralNetwork hyperparameters.
    for opt, arg in zip(opts, args):
        if opt == '-wp' or opt == '--wandb_project':
            project_name = arg
        elif opt == '-we' or opt == '--wandb_entity':
            entity_name = arg
        elif opt == '-d' or opt == '--dataset':
            dataset = arg
        elif opt == '-e' or opt == '--epochs':
            n_epochs = int(arg)
        elif opt == '-b' or opt == '--batch_size':
            params['batch_size'] = int(arg)
        elif opt == '-l' or opt == '--loss':
            params['loss_type'] = arg
        elif opt == '-o' or opt == '--optimizer':
            params['optimizer'] = arg
        elif opt == '-lr' or opt == '--learning_rate':
            params['lr'] = float(arg)
        elif opt == '-m' or opt == '--momentum':
            params['m_factor'] = float(arg)
        elif opt == '-beta' or opt == '--beta':
            params['beta'] = float(arg)
        elif opt == '-beta1' or opt == '--beta1':
            params['beta1'] = float(arg)
        elif opt == '-beta2' or opt == '--beta2':
            params['beta2'] = float(arg)
        elif opt == '-w_d' or opt == '--weight_decay':
            params['lamda'] = float(arg)
        elif opt == '-w_i' or opt == '--weight_init':
            params['weight_init'] = arg
        elif opt == '-nhl' or opt == '--num_layers':
            params['n_hidden'] = int(arg)
        elif opt == '-sz' or opt == '--hidden_size':
            params['hl_size'] = int(arg)
        elif opt == '-a' or opt == '--activation':
            params['activation_fn'] = arg
        elif opt == '-rn' or opt == '--run_name':
            run_name = arg
        else:
            print(f'Unrecognized option "{opt}". Follow the expected option/value format to run the script.')
            sys.exit()
    # A W&B project and entity are required for logging.
    assert project_name != '' and entity_name != '', 'Provide --wandb_project and --wandb_entity.'
    wandb.init(entity=entity_name, project=project_name, name=run_name)
    network = NeuralNetwork(**params)
    if dataset == 'fashion_mnist':
        (trainX, trainy), (testX, testy) = fashion_mnist.load_data()
    elif dataset == 'mnist':
        (trainX, trainy), (testX, testy) = mnist.load_data()
    else:
        print('Wrong dataset. Choose "fashion_mnist" or "mnist".')
        sys.exit()
    # Scale pixel values to [0, 1] and hold out 10% of the training data for validation.
    trainX = trainX / 255
    testX = testX / 255
    trainX, valX, trainy, valy = train_test_split(trainX, trainy, test_size=0.1, random_state=40)
    x_train = []
    x_test = []
    x_val = []
    for i in range(0, len(trainX)):
        x_train.append(trainX[i, :, :].flatten())
    for i in range(0, len(testX)):
        x_test.append(testX[i, :, :].flatten())
    for i in range(0, len(valX)):
        x_val.append(valX[i, :, :].flatten())
    y_train = to_categorical(trainy)
    y_test = to_categorical(testy)
    y_val = to_categorical(valy)
    print('Training process started with ' + network.optimizer + ' optimizer.')
    for i in range(0, n_epochs):
        # One pass over the training data; the last two values are computed on the validation split.
        train_loss, train_acc, val_acc, val_loss = network.epoch(x_train, y_train, x_val, y_val, i)
        log_dict = {"train_loss": train_loss, "train_accuracy": train_acc, "validation_loss": val_loss, "validation_accuracy": val_acc, "epoch": i + 1}
        wandb.log(log_dict)
    wandb.run.save()
    wandb.run.finish()
    test_accuracy = network.inference(x_test, y_test, loss_flag=False)
    print(f'The test accuracy is {test_accuracy}.')
    file_name = 'model.pkl'
    with open(file_name, 'wb') as file:
        pickle.dump(network, file)
    print(f'Network successfully saved to "{file_name}"')


if __name__ == "__main__":
    main(sys.argv[1:])
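
# Loading the saved model back is a short sketch along these lines (assumes the
# neuralnetwork module is importable so pickle can resolve the NeuralNetwork class,
# and that x_test/y_test stand in for flattened, one-hot-encoded data prepared as above):
#
#   import pickle
#
#   with open('model.pkl', 'rb') as f:
#       network = pickle.load(f)
#   accuracy = network.inference(x_test, y_test, loss_flag=False)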