NanaEilish committed on
Commit
c207c52
1 Parent(s): 921c6c2

Add/Update Model

runs/events.out.tfevents.1698240792.ikmlab-ESC4000-G4.458498.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a24fc9208485ff11c322370c14fa8747d107a3568a49bff387a671a720549077
+ size 2728
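The events file is tracked with Git LFS, so the commit records only a pointer (spec version, sha256 object id, and size in bytes, here 2728) rather than the TensorBoard log itself. As a minimal sketch, assuming the repo is checked out with the LFS smudge filter skipped so the pointer text is still on disk, the fields can be read back like this (the read_lfs_pointer helper is hypothetical, not part of this repo):

# Hypothetical helper: parse a Git LFS pointer file into its key/value fields.
# Assumes the file still contains the pointer text (e.g. cloned with
# GIT_LFS_SKIP_SMUDGE=1), not the resolved binary log.
def read_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

pointer = read_lfs_pointer(
    "runs/events.out.tfevents.1698240792.ikmlab-ESC4000-G4.458498.0"
)
print(pointer["oid"], pointer["size"])  # sha256:a24fc920... 2728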
runs/hparams.yaml ADDED
@@ -0,0 +1,37 @@
+ hparam: !!python/object:argparse.Namespace
+   accumulate_grad_batches: 1
+   adam_epsilon: 1.0e-08
+   callback: !!python/object:argparse.Namespace
+     early_stop: true
+     lr_scheduler: true
+     model_checkpoint: true
+     push_to_hub: true
+   dataset: !!python/object:argparse.Namespace
+     config: english_v12
+     name: conll2012_ontonotesv5
+   eval_batch_size: 8
+   huggingface_hub: !!python/object:argparse.Namespace
+     repo_name: t5_conll_ontonotes_en14
+     user_name: NanaEilish
+   log_name: t5_pretrain
+   lr: 0.0003
+   lr_decay_min_lr: 1.0e-05
+   lr_decay_rate: 0.5
+   lr_decay_steps: 20
+   lr_scheduler: linear_wmup
+   max_epochs: 5
+   max_grad_norm: 1
+   max_len: 512
+   max_output_len: 100
+   model_name: t5-base
+   monitor_max_or_min: min
+   monitor_metrics: val_loss
+   n_gpu: 4
+   num_workers: 8
+   output_dir: output/t5_pretrain
+   seed: 42
+   train_batch_size: 8
+   train_size: 30
+   valid_size: 1
+   warmup_ratio: 0.2
+   weight_decay: 1.0e-05
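Because the file serializes argparse.Namespace objects via !!python/object tags, yaml.safe_load will refuse to construct them; a loader that permits Python object construction is needed. A minimal sketch, assuming PyYAML >= 5.1 and a trusted file (unsafe_load builds arbitrary Python objects, so it should only be used on files you control):

# Sketch: load runs/hparams.yaml back into nested argparse.Namespace objects.
# yaml.unsafe_load is required by the !!python/object:argparse.Namespace tags.
import yaml

with open("runs/hparams.yaml", "r", encoding="utf-8") as f:
    hparams = yaml.unsafe_load(f)

hparam = hparams["hparam"]             # argparse.Namespace
print(hparam.model_name)               # t5-base
print(hparam.lr, hparam.max_epochs)    # 0.0003 5
print(hparam.dataset.name)             # conll2012_ontonotesv5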