{
  "arch": "mini_gpt4_llama_v2",
  "architectures": [
    "MiniGPT4_Video"
  ],
  "auto_map": {
    "AutoConfig": "mini_gpt4_llama_v2.minigpt4_video_config",
    "AutoModel": "mini_gpt4_llama_v2.MiniGPT4_Video"
  },
  "chat_template": true,
  "ckpt": "checkpoints/video_mistral_all_checkpoint_last.pth",
  "device": "cuda",
  "drop_path_rate": 0,
  "end_sym": "",
  "freeze_qformer": true,
  "freeze_vit": true,
  "image_size": 224,
  "img_size": 224,
  "length": 50,
  "llama_model": "mistralai/Mistral-7B-Instruct-v0.2",
  "lora_alpha": 16,
  "lora_dropout": 0.05,
  "lora_r": 64,
  "lora_target_modules": [
    "q_proj",
    "v_proj"
  ],
  "low_resource": true,
  "max_context_len": 7200,
  "max_txt_len": 512,
  "model_type": "minigpt4_video",
  "num_query_token": 32,
  "prompt": "",
  "prompt_path": "",
  "remove_template": false,
  "token_pooling": true,
  "torch_dtype": "float32",
  "transformers_version": "4.37.2",
  "use_grad_checkpoint": true,
  "use_grad_checkpoint_llm": true,
  "vit_model": "eva_clip_g",
  "vit_precision": "fp16"
}