-
Notifications
You must be signed in to change notification settings - Fork 0
/
expressions_answer.py
75 lines (57 loc) · 2.5 KB
/
expressions_answer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
'''
Created on 16 feb. 2016
@author: Robert-Jan
'''
import time;
import sys;
from tools.file import save_to_pickle;
from tools.arguments import processCommandLineArguments;
from tools.model import train, test, constructModels;
from tools.gpu import using_gpu; # @UnresolvedImport
import numpy as np;
#import theano;
#theano.config.mode = 'FAST_COMPILE'
def writeToVerbose(verboseOutputter, s):
    """Append one line to the verbose debug log.

    Opens the log file via the outputter's 'f' factory (an append-mode
    open of verboseOutputter['name']), writes `s` plus a newline, and
    closes the handle again so each call is self-contained.

    :param verboseOutputter: dict with key 'f' -> zero-arg callable
        returning an open, writable file object.
    :param s: line of text to append (newline is added here).
    """
    # `with` guarantees the handle is closed even if write() raises;
    # the original open()/write()/close() sequence leaked the handle
    # on a write error.
    with verboseOutputter['f']() as f:
        f.write(s + '\n');
if (__name__ == '__main__'):
    # NOTE(review): this is a Python 2 script -- it uses raw_input() and
    # time.clock() (removed in Python 3 / 3.8 respectively). Porting is a
    # behavior change; flagged only, not altered here.

    # Print numpy arrays nearly in full (huge threshold) at 3-decimal
    # precision, so verbose/debug dumps are readable and complete.
    np.set_printoptions(precision=3, threshold=10000000);

    # Specific settings - default name is time of experiment
    name = time.strftime("%d-%m-%Y_%H-%M-%S");
    saveModels = True;

    # Process parameters
    parameters = processCommandLineArguments(sys.argv[1:]);

    # Set up extreme verbose output
    # Can always be called but will not do anything if not needed
    if (parameters['extreme_verbose']):
        if (parameters['tensorflow']):
            raise ValueError("Feature extreme_verbose = True not supported in combination with tensorflow = True!");
        # 'f' reopens the debug file in append mode on every call;
        # 'write' appends a single line via writeToVerbose and closes it.
        verboseOutputter = {'name': './verbose_output/%s.debug' % name};
        verboseOutputter['f'] = lambda: open(verboseOutputter['name'],'a');
        verboseOutputter['write'] = lambda s: writeToVerbose(verboseOutputter, s);
    else:
        verboseOutputter = None;

    # Ask for seed if running random baseline
    seed = 0;
    if (parameters['random_baseline']):
        seed = int(raw_input("Please provide an integer seed for the random number generation: "));

    # Warn for unusual parameters
    # (max_training_size defaults to False when no limit was requested)
    if (parameters['max_training_size'] is not False):
        print("WARNING! RUNNING WITH LIMIT ON TRAINING SIZE!");
    if (not using_gpu()):
        print("WARNING! RUNNING WITHOUT GPU USAGE!");

    # Construct models
    # constructModels returns the list of datasets and the model instance;
    # presumably 'rnn' may be another model type depending on parameters -- TODO confirm.
    datasets, rnn = constructModels(parameters, seed, verboseOutputter);

    ### From here the experiment should be the same every time
    # Start experiment clock
    start = time.clock();

    # Train on all datasets in succession
    # targets=True unless predicting a single digit; 'name' is used by
    # train() for checkpointing when saveModels is set.
    train(rnn, datasets, parameters, name, start, saveModels=saveModels, targets=not parameters['single_digit'], verboseOutputter=verboseOutputter);
    print("Training all datasets finished!");

    # Final test on last dataset
    test(rnn, datasets[-1], parameters, start, show_prediction_conf_matrix=False);

    # Save weights to pickles
    if (saveModels):
        saveVars = rnn.getVars();
        save_to_pickle('saved_models/%s.model' % name, saveVars);