{ "model_type": "mistral", "quantization": "q4f16_1", "model_config": { "hidden_size": 2560, "intermediate_size": 6912, "num_attention_heads": 32, "num_hidden_layers": 24, "rms_norm_eps": 1e-05, "vocab_size": 32000, "position_embedding_base": 10000.0, "num_key_value_heads": 8, "head_dim": 80, "context_window_size": 8192, "sliding_window_size": 8192, "prefill_chunk_size": -1, "attention_sink_size": 0, "tensor_parallel_shards": 1, "max_batch_size": 80 }, "vocab_size": 32000, "context_window_size": 8192, "sliding_window_size": 8192, "prefill_chunk_size": 8192, "attention_sink_size": 0, "tensor_parallel_shards": 1, "mean_gen_len": 128, "max_gen_len": 512, "shift_fill_factor": 0.3, "temperature": 0.7, "presence_penalty": 0.0, "frequency_penalty": 0.0, "repetition_penalty": 1.1, "top_p": 0.95, "conv_template": { "name": "LM", "system_template": "{system_message}", "system_message": "", "system_prefix_token_ids": [], "add_role_after_system_message": true, "roles": { "user": "<|prompt|>", "assistant": "<|answer|>" }, "role_templates": { "user": "{user_message}", "assistant": "{assistant_message}", "tool": "{tool_message}" }, "messages": [], "seps": [ "", "" ], "role_content_sep": "", "role_empty_sep": "", "stop_str": [""], "stop_token_ids": [ 2 ], "function_string": "", "use_function_calling": false }, "pad_token_id": 0, "bos_token_id": 1, "eos_token_id": 2, "tokenizer_files": [ "tokenizer.model", "tokenizer.json", "tokenizer_config.json" ], "version": "0.1.0" }