DongfuJiang committed
Commit c863612
Parent: a14c9d5

Upload config.json with huggingface_hub

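For reference, a commit like this is typically produced with huggingface_hub's `upload_file` API. The sketch below uses a placeholder repo id, since the target repository name is not shown on this page.

```python
# Sketch of the kind of call that produces this commit, using
# huggingface_hub's HfApi.upload_file. The repo_id is a placeholder,
# not taken from this page.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="config.json",   # local file to upload
    path_in_repo="config.json",      # destination path in the repo
    repo_id="TIGER-Lab/<repo-name>", # placeholder repo id
    commit_message="Upload config.json with huggingface_hub",
)
```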
Files changed (1)
  1. config.json +60 -0
config.json ADDED
@@ -0,0 +1,60 @@
+ {
+   "_name_or_path": "TIGER-Lab/Mantis-8B-siglip-llama3-pretraind",
+   "architectures": [
+     "LlavaForConditionalGeneration"
+   ],
+   "ignore_index": -100,
+   "image_token_index": 128256,
+   "model_type": "llava",
+   "pad_token_id": 128257,
+   "projector_hidden_act": "gelu",
+   "quantization_config": {
+     "_load_in_4bit": true,
+     "_load_in_8bit": false,
+     "bnb_4bit_compute_dtype": "bfloat16",
+     "bnb_4bit_quant_storage": "bfloat16",
+     "bnb_4bit_quant_type": "nf4",
+     "bnb_4bit_use_double_quant": true,
+     "llm_int8_enable_fp32_cpu_offload": false,
+     "llm_int8_has_fp16_weight": false,
+     "llm_int8_skip_modules": [
+       "vision_tower"
+     ],
+     "llm_int8_threshold": 6.0,
+     "load_in_4bit": true,
+     "load_in_8bit": false,
+     "quant_method": "bitsandbytes"
+   },
+   "text_config": {
+     "_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct",
+     "architectures": [
+       "LlamaForCausalLM"
+     ],
+     "bos_token_id": 128000,
+     "eos_token_id": 128001,
+     "intermediate_size": 14336,
+     "max_position_embeddings": 8192,
+     "model_type": "llama",
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-05,
+     "rope_theta": 500000.0,
+     "torch_dtype": "bfloat16",
+     "vocab_size": 128258
+   },
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.41.0",
+   "vision_config": {
+     "hidden_act": "gelu_pytorch_tanh",
+     "hidden_size": 1152,
+     "image_size": 384,
+     "intermediate_size": 4304,
+     "layer_norm_eps": 1e-06,
+     "model_type": "siglip_vision_model",
+     "num_attention_heads": 16,
+     "num_hidden_layers": 27,
+     "patch_size": 14
+   },
+   "vision_feature_layer": -2,
+   "vision_feature_select_strategy": "default",
+   "vocab_size": 128258
+ }
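The uploaded config describes a LLaVA-architecture model pairing a SigLIP vision tower (27 layers, 384px images, 14px patches) with Meta-Llama-3-8B-Instruct, pre-quantized to 4-bit NF4 via bitsandbytes (the vision tower is left unquantized via "llm_int8_skip_modules"). Note the vocabulary: Llama 3's base 128,256 tokens are extended with an image placeholder token (id 128256) and a pad token (id 128257), giving "vocab_size" 128258. A minimal loading sketch under those assumptions follows; the repo id is a placeholder.

```python
# Minimal loading sketch. The repo_id is an assumption, not taken from
# this commit page; substitute the actual repository name.
import torch
from transformers import AutoProcessor, LlavaForConditionalGeneration

repo_id = "TIGER-Lab/Mantis-8B-siglip-llama3"  # placeholder repo id

processor = AutoProcessor.from_pretrained(repo_id)

# Because quantization_config is embedded in config.json, from_pretrained
# applies the 4-bit NF4 bitsandbytes settings on load; pass your own
# BitsAndBytesConfig via quantization_config=... only to override them.
model = LlavaForConditionalGeneration.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in the config
    device_map="auto",
)
```

At inference time, each image placeholder (token id 128256) in the prompt is expanded into patch features taken from the SigLIP tower's penultimate hidden state, per "vision_feature_layer": -2 in the config.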