train_FLANG_ELECTRA.py
from simpletransformers.language_modeling import LanguageModelingModel
import logging

# Show training progress, but silence verbose transformers logging.
logging.basicConfig(level=logging.INFO)
transformers_logger = logging.getLogger("transformers")
transformers_logger.setLevel(logging.WARNING)

# Training configuration for further pretraining ELECTRA on financial text.
train_args = {
    "save_steps": 10000,
    "reprocess_input_data": False,
    "overwrite_output_dir": False,
    "num_train_epochs": 4,
    # "learning_rate": 1e-4,
    "warmup_steps": 100000,
    "train_batch_size": 96,
    "eval_batch_size": 96,
    "output_dir": "data/FLANG_ELECTRA",
    "gradient_accumulation_steps": 1,
    "n_gpu": 2,
}
# Placeholder paths to the pretraining corpus splits.
train_file = "/path-to-train-file"
test_file = "/path-to-test-file"

# Initialize an ELECTRA language model from the pretrained
# google/electra-base generator and discriminator checkpoints.
model = LanguageModelingModel(
    "electra",
    "electra",
    args=train_args,
    train_files=train_file,
    generator_name="google/electra-base-generator",
    discriminator_name="google/electra-base-discriminator",
)

# Continue pretraining on the financial corpus, evaluating on the held-out file.
model.train_model(train_file, eval_file=test_file)
model.eval_model(test_file)

# Save the discriminator and generator separately for downstream use.
model.save_discriminator()
model.save_generator()
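
A minimal sketch of downstream use, assuming save_discriminator() writes the checkpoint to the library's default "<output_dir>/discriminator_model" location; simpletransformers' ClassificationModel can then fine-tune the saved discriminator on a labeled task (the path and num_labels below are illustrative assumptions, not part of the original script):

from simpletransformers.classification import ClassificationModel

# Load the further-pretrained discriminator for sequence classification.
# Path assumes the default "<output_dir>/discriminator_model" save location.
clf_model = ClassificationModel(
    "electra",
    "data/FLANG_ELECTRA/discriminator_model",
    num_labels=2,  # hypothetical binary task; set to your label count
)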