test_warmup.py
import unittest

from easy_bert.bert4classification.classification_trainer import ClassificationTrainer
from easy_bert.bert4sequence_labeling.sequence_labeling_trainer import SequenceLabelingTrainer


class MyTestCase(unittest.TestCase):
    """Exercise the warmup_type options (None / 'constant' / 'linear' / 'cosine') for both trainers."""

    def setUp(self):
        self.model_dir = './tests/test_model'
        self.pretrained_model_dir = './models/chinese-roberta-wwm-ext'

    def test_classification(self):
        print('test_classification~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
        texts = ['天气真好', '今天运气很差']
        labels = ['正面', '负面']

        # No warmup: plain training without a warmup schedule
        print("warmup_type=None")
        trainer = ClassificationTrainer(self.pretrained_model_dir, self.model_dir, learning_rate=5e-5,
                                        warmup_type=None)
        trainer.train(texts, labels, validate_texts=texts, validate_labels=labels, batch_size=2, epoch=20)

        # Constant warmup (warmup_step_num=0.5 here is presumably a fraction of the total steps)
        print("warmup_type='constant'")
        trainer = ClassificationTrainer(self.pretrained_model_dir, self.model_dir, learning_rate=5e-5,
                                        warmup_type='constant', warmup_step_num=0.5)
        trainer.train(texts, labels, validate_texts=texts, validate_labels=labels, batch_size=2, epoch=20)

        # Linear warmup with an absolute step count
        print("warmup_type='linear'")
        trainer = ClassificationTrainer(self.pretrained_model_dir, self.model_dir, learning_rate=5e-5,
                                        warmup_type='linear', warmup_step_num=10)
        trainer.train(texts, labels, validate_texts=texts, validate_labels=labels, batch_size=2, epoch=20)

        # Cosine warmup with an absolute step count
        print("warmup_type='cosine'")
        trainer = ClassificationTrainer(self.pretrained_model_dir, self.model_dir, learning_rate=5e-5,
                                        warmup_type='cosine', warmup_step_num=10)
        trainer.train(texts, labels, validate_texts=texts, validate_labels=labels, batch_size=2, epoch=20)

    def test_sequence_labeling(self):
        print('test_sequence_labeling~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
        texts = [['你', '好', '呀'], ['一', '马', '当', '先', '就', '是', '好']]
        labels = [['B', 'E', 'S'], ['B', 'M', 'M', 'E', 'S', 'S', 'S']]

        # No warmup
        print("warmup_type=None")
        trainer = SequenceLabelingTrainer(self.pretrained_model_dir, self.model_dir, learning_rate=5e-5,
                                          warmup_type=None)
        trainer.train(texts, labels, validate_texts=texts, validate_labels=labels, batch_size=2, epoch=20)

        # Constant warmup (warmup_step_num=0.5 here is presumably a fraction of the total steps)
        print("warmup_type='constant'")
        trainer = SequenceLabelingTrainer(self.pretrained_model_dir, self.model_dir, learning_rate=5e-5,
                                          warmup_type='constant', warmup_step_num=0.5)
        trainer.train(texts, labels, validate_texts=texts, validate_labels=labels, batch_size=2, epoch=20)

        # Linear warmup with an absolute step count
        print("warmup_type='linear'")
        trainer = SequenceLabelingTrainer(self.pretrained_model_dir, self.model_dir, learning_rate=5e-5,
                                          warmup_type='linear', warmup_step_num=10)
        trainer.train(texts, labels, validate_texts=texts, validate_labels=labels, batch_size=2, epoch=20)

        # Cosine warmup with an absolute step count
        print("warmup_type='cosine'")
        trainer = SequenceLabelingTrainer(self.pretrained_model_dir, self.model_dir, learning_rate=5e-5,
                                          warmup_type='cosine', warmup_step_num=10)
        trainer.train(texts, labels, validate_texts=texts, validate_labels=labels, batch_size=2, epoch=20)


if __name__ == '__main__':
    unittest.main()
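
A minimal way to run only these warmup tests, assuming this file is saved as tests/test_warmup.py and the working directory is the repository root (so the relative paths ./models/chinese-roberta-wwm-ext and ./tests/test_model used in setUp resolve). The helper script name is hypothetical; it uses only the standard unittest loader and runner:

# run_warmup_tests.py -- hypothetical helper, kept separate from the test module itself
import unittest

if __name__ == '__main__':
    # Load the test module by its dotted name and run it with verbose output
    suite = unittest.defaultTestLoader.loadTestsFromName('tests.test_warmup')
    unittest.TextTestRunner(verbosity=2).run(suite)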