{
  "training_arguments": {
    "output_dir": "./models/protgpt2-distilled-t10.0-a0.1-l12-h16-e1024-p0.1-lr1e-04.uniprot",
    "overwrite_output_dir": false,
    "do_train": false,
    "do_eval": false,
    "do_predict": false,
    "evaluation_strategy": "no",
    "prediction_loss_only": false,
    "per_device_train_batch_size": 1,
    "per_device_eval_batch_size": 8,
    "per_gpu_train_batch_size": null,
    "per_gpu_eval_batch_size": null,
    "gradient_accumulation_steps": 32,
    "eval_accumulation_steps": null,
    "eval_delay": 0,
    "learning_rate": 0.0001,
    "weight_decay": 0.01,
    "adam_beta1": 0.9,
    "adam_beta2": 0.999,
    "adam_epsilon": 1e-08,
    "max_grad_norm": 1.0,
    "num_train_epochs": 3,
    "max_steps": -1,
    "lr_scheduler_type": "linear",
    "lr_scheduler_kwargs": {},
    "warmup_ratio": 0.0,
    "warmup_steps": 0,
    "log_level": "passive",
    "log_level_replica": "warning",
    "log_on_each_node": true,
    "logging_dir": "./models/protgpt2-distilled-t10.0-a0.1-l12-h16-e1024-p0.1-lr1e-04.uniprot/runs/Apr28_00-56-32_ip-172-31-34-182",
    "logging_strategy": "steps",
    "logging_first_step": false,
    "logging_steps": 10,
    "logging_nan_inf_filter": true,
    "save_strategy": "no",
    "save_steps": 500,
    "save_total_limit": 1,
    "save_safetensors": true,
    "save_on_each_node": false,
    "save_only_model": false,
    "no_cuda": false,
    "use_cpu": false,
    "use_mps_device": false,
    "seed": 42,
    "data_seed": null,
    "jit_mode_eval": false,
    "use_ipex": false,
    "bf16": false,
    "fp16": true,
    "fp16_opt_level": "O1",
    "half_precision_backend": "auto",
    "bf16_full_eval": false,
    "fp16_full_eval": false,
    "tf32": null,
    "local_rank": 0,
    "ddp_backend": null,
    "tpu_num_cores": null,
    "tpu_metrics_debug": false,
    "debug": [],
    "dataloader_drop_last": false,
    "eval_steps": null,
    "dataloader_num_workers": 0,
    "dataloader_prefetch_factor": null,
    "past_index": -1,
    "run_name": "./models/protgpt2-distilled-t10.0-a0.1-l12-h16-e1024-p0.1-lr1e-04.uniprot",
    "disable_tqdm": false,
    "remove_unused_columns": true,
    "label_names": null,
    "load_best_model_at_end": false,
    "metric_for_best_model": null,
    "greater_is_better": null,
    "ignore_data_skip": false,
    "fsdp": [],
    "fsdp_min_num_params": 0,
    "fsdp_config": {
      "min_num_params": 0,
      "xla": false,
      "xla_fsdp_v2": false,
      "xla_fsdp_grad_ckpt": false
    },
    "fsdp_transformer_layer_cls_to_wrap": null,
    "accelerator_config": {
      "split_batches": false,
      "dispatch_batches": null,
      "even_batches": true,
      "use_seedable_sampler": true
    },
    "deepspeed": null,
    "label_smoothing_factor": 0.0,
    "optim": "adamw_torch",
    "optim_args": null,
    "adafactor": false,
    "group_by_length": false,
    "length_column_name": "length",
    "report_to": [
      "wandb"
    ],
    "ddp_find_unused_parameters": null,
    "ddp_bucket_cap_mb": null,
    "ddp_broadcast_buffers": null,
    "dataloader_pin_memory": true,
    "dataloader_persistent_workers": false,
    "skip_memory_metrics": true,
    "use_legacy_prediction_loop": false,
    "push_to_hub": false,
    "resume_from_checkpoint": null,
    "hub_model_id": null,
    "hub_strategy": "every_save",
    "hub_token": "<HUB_TOKEN>",
    "hub_private_repo": false,
    "hub_always_push": false,
    "gradient_checkpointing": false,
    "gradient_checkpointing_kwargs": null,
    "include_inputs_for_metrics": false,
    "fp16_backend": "auto",
    "push_to_hub_model_id": null,
    "push_to_hub_organization": null,
    "push_to_hub_token": "<PUSH_TO_HUB_TOKEN>",
    "mp_parameters": "",
    "auto_find_batch_size": false,
    "full_determinism": false,
    "torchdynamo": null,
    "ray_scope": "last",
    "ddp_timeout": 1800,
    "torch_compile": false,
    "torch_compile_backend": null,
    "torch_compile_mode": null,
    "dispatch_batches": null,
    "split_batches": null,
    "include_tokens_per_second": false,
    "include_num_input_tokens_seen": false,
    "neftune_noise_alpha": null,
    "optim_target_modules": null
  },
  "distillation_temperature": 10.0,
  "distillation_alpha": 0.1,
  "model_architecture": {
    "n_embd": 1024,
    "n_layer": 12,
    "n_head": 16
  }
}
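
The "distillation_temperature" (T = 10.0) and "distillation_alpha" (0.1) entries control how the student's loss blends hard-label cross-entropy with the temperature-softened teacher distribution. Below is a minimal sketch of one common weighting convention; the exact blend used for this run is an assumption, since only the two scalars are recorded in this file.

    import torch.nn.functional as F

    TEMPERATURE = 10.0  # distillation_temperature from the config
    ALPHA = 0.1         # distillation_alpha from the config

    def distillation_loss(student_logits, teacher_logits, labels):
        # Soft-target term: KL divergence between temperature-softened
        # distributions, scaled by T^2 to keep gradient magnitudes comparable.
        soft_student = F.log_softmax(student_logits / TEMPERATURE, dim=-1)
        soft_teacher = F.softmax(teacher_logits / TEMPERATURE, dim=-1)
        kd_term = F.kl_div(soft_student, soft_teacher, reduction="batchmean") * TEMPERATURE ** 2
        # Hard-label term: ordinary next-token cross-entropy.
        ce_term = F.cross_entropy(
            student_logits.view(-1, student_logits.size(-1)), labels.view(-1)
        )
        # Assumed convention: alpha weights the hard-label loss, (1 - alpha) the soft term.
        return ALPHA * ce_term + (1.0 - ALPHA) * kd_term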
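
The "model_architecture" block maps directly onto a Hugging Face GPT2Config for the distilled student. A minimal sketch, assuming the student keeps the GPT-2 default vocabulary size and the 0.1 dropout suggested by the "p0.1" tag in output_dir (both assumptions, not values stored in this file):

    from transformers import GPT2Config, GPT2LMHeadModel

    # Build the 12-layer, 16-head, 1024-dim student described in "model_architecture".
    # vocab_size is left at the GPT-2 default and would need to match the teacher's
    # tokenizer; the dropout values are inferred from the run name, not this config.
    student_config = GPT2Config(
        n_embd=1024,
        n_layer=12,
        n_head=16,
        resid_pdrop=0.1,
        embd_pdrop=0.1,
        attn_pdrop=0.1,
    )
    student = GPT2LMHeadModel(student_config)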