tiny-random-snowflake / config.json
{
"_name_or_path": "/home/ea/work/snowflake",
"architectures": [
"ArcticForCausalLM"
],
"attention_dropout": 0,
"auto_map": {
"AutoConfig": "configuration_arctic.ArcticConfig",
"AutoModel": "modeling_arctic.ArcticModel",
"AutoModelForCausalLM": "modeling_arctic.ArcticForCausalLM",
"AutoModelForSequenceClassification": "modeling_arctic.ArcticForSequenceClassification"
},
"bos_token_id": 1,
"enable_expert_tensor_parallelism": false,
"enc_index": [
0,
1,
2,
3
],
"eos_token_id": 2,
"hidden_act": "silu",
"hidden_size": 32,
"initializer_range": 0.02,
"intermediate_size": 16,
"max_position_embeddings": 128,
"max_sequence_length": 128,
"model_type": "arctic",
"moe_eval_capacity_factor": 1,
"moe_layer_frequency": 1,
"moe_min_capacity": 0,
"moe_token_dropping": true,
"moe_train_capacity_factor": 1,
"num_attention_heads": 4,
"num_experts_per_tok": 2,
"num_hidden_layers": 4,
"num_key_value_heads": 4,
"num_local_experts": 4,
"parallel_attn_mlp_res": true,
"quantization": null,
"rms_norm_eps": 1e-05,
"rope_theta": 10000,
"router_aux_loss_coef": 0.001,
"sliding_window": null,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.40.2",
"use_cache": true,
"use_residual": true,
"vocab_size": 32000
}
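
For reference, here is a minimal sketch of loading a checkpoint that ships this config with transformers. The repo id katuni4ka/tiny-random-snowflake is an assumption inferred from the page header; trust_remote_code=True is required because auto_map points at custom configuration_arctic / modeling_arctic files bundled with the repo rather than classes built into transformers.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed repo id, inferred from the uploader and repo name on this page.
model_id = "katuni4ka/tiny-random-snowflake"

# trust_remote_code=True lets transformers import the custom ArcticConfig /
# ArcticForCausalLM classes referenced in the auto_map entries above.
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True)

# The model is tiny (hidden_size=32, 4 layers, vocab_size=32000) and randomly
# initialized, so the output is only meaningful as a smoke test, not as text.
inputs = tokenizer("Hello", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=8)
print(tokenizer.decode(outputs[0]))

Tiny randomly-initialized configs like this one are typically used to exercise model-loading and generation code paths quickly in CI, which is why the dimensions (hidden_size, intermediate_size, max_position_embeddings) are far smaller than any production Arctic checkpoint.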