startup.sh (forked from tensorflow/tensor2tensor)
#!/bin/bash
# Remove any previous checkout, re-clone the fork, set the git identity, and run project setup.
cd ..
sudo rm -r tensor2tensor
git clone https://github.com/jonmcwong/tensor2tensor.git
cd tensor2tensor
git config --global user.name jonmcwong
git config --global user.email [email protected]
./setup.sh
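# Evaluate individual experiment runs with single_smart_evaluate.sh.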
./single_smart_evaluate.sh Q8 transformer data-easy-2020-06-24 8
./single_smart_evaluate.sh Q12 transformer base-relu-dp-01-2020-06-25 0
./single_smart_evaluate.sh Q12 transformer base-relu-dp-00-2020-06-25 0
./single_smart_evaluate.sh Q12 \
universal_transformer ut-pres2-2020-06-29 11
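# Pull the ut-pres2 evaluation results from the GCS buckets into the working directory.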
export file_name=results-universal_transformer-ut-pres2-2020-06-29
gsutil -m cp -r \
gs://mathsreasoning/${file_name} \
$PWD
gsutil -m cp gs://us_bucketbucket/${file_name}/* \
$PWD/${file_name}
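# Copy the ut-pres2 training directory from the mathsreasoning bucket to us_bucketbucket.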
export file_name=universal_transformer-ut-pres2-2020-06-29
gsutil -m cp -r \
gs://mathsreasoning/t2t_train/algorithmic_math_deepmind_all/${file_name} \
gs://us_bucketbucket/t2t_train/algorithmic_math_deepmind_all/
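# Refresh the working tree, rebuild, and re-run selected evaluations (some inside a screen session).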
git checkout .
git pull
./rebuild.sh
chmod +x single_smart_evaluate.sh
exit
git checkout .
git pull
./single_smart_evaluate.sh Q8 transformer data-easy-2020-06-24 8
./setup.sh
screen
./single_smart_evaluate.sh transformer base_test-dropout01-2020-06-24 1
./single_smart_evaluate.sh universal_transformer global-lowerlr0-02-2020-06-23 0
./single_smart_evaluate.sh universal_transformer global-lowerlr0-02-2020-06-23 4
git pull
chmod +x single_smart_evaluate.sh
# Stray GCS path with no command attached; commented out so the script does not execute it:
# gs://mathsreasoning/t2t_train/algorithmic_math_deepmind_all/transformer-data-easy-2020-06-24 \
git clone https://github.com/jonmcwong/FYP_code.git
git clone https://github.com/jonmcwong/PyTorch-Beam-Search-Decoding.git
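# --- Python plotting snippets below (notebook/interpreter calls, not shell commands) ---
# These assume matplotlib.pyplot is imported as plt and that the project's plotting
# helpers (plot_against_steps, make_md, plot_against_difficulty) and the holder8
# results object are already defined.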
# UT global graph
plt.clf()
plot_against_steps(make_md([
"universal_transformer-global-lowerlr0-02-2020-06-23",
], [
"all",
]),
xlim=(-5000, 105000),
ylim=(0, 1),
title="Accuracies By Question Type During Universal Transformer Training",
save_name="Latest_plot.png",
font_size=18,
include_model_name=False,
)
# base test dropout 01
plot_against_steps(make_md([
"combined_transformer-base-dropout01",
], [
"all",
]),
xlim=(-10000, 905000),
ylim=(-0.05, 1.05),
title="Accuracies By Question Type During Transformer Base Training",
save_name="Latest_plot.png",
font_size=20,
include_model_name=False,
multi_model=False,
)
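# Transformer accuracy against question difficulty (holder8 results)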
plot_against_difficulty(holder8,
title="Transformer Accuracy Against Question Difficulty",
)
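# Per-question-type accuracy for the ut-pres2 run, zoomed to steps 200k-400k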
plt.clf()
fig, axs = plt.subplots(1, 1, squeeze=False)
plot_against_steps(
axs[0][0],
make_md([
# "universal_transformer-ut-pres-2020-06-28",
"universal_transformer-ut-pres2-2020-06-29",
], [
"all"
]),
xlim=(200000, 400000),
ylim=(-0.05, 1.05),
title="Accuracies By Question Type During Universal Transformer Training",
save_name="Latest_plot.png",
font_size=20,
include_model_name=True,
multi_model=False,
include_transformer_type=False,
zeroed=False,
)