{ "_name_or_path": "microsoft/Phi-3-vision-128k-instruct", "architectures": [ "Phi3VForCausalLM" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "microsoft/Phi-3-vision-128k-instruct--configuration_phi3_v.Phi3VConfig", "AutoModelForCausalLM": "microsoft/Phi-3-vision-128k-instruct--modeling_phi3_v.Phi3VForCausalLM" }, "bos_token_id": 1, "embd_layer": { "embedding_cls": "image", "hd_transform_order": "sub_glb", "projection_cls": "mlp", "use_hd_transform": true, "with_learnable_separator": true }, "embd_pdrop": 0.0, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 16, "img_processor": { "image_dim_out": 1024, "model_name": "openai/clip-vit-large-patch14-336", "name": "clip_vision_model", "num_img_tokens": 144 }, "initializer_range": 0.02, "intermediate_size": 32, "max_position_embeddings": 131072, "model_type": "phi3_v", "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 4, "original_max_position_embeddings": 4096, "pad_token_id": 32000, "resid_pdrop": 0.0, "rms_norm_eps": 1e-05, "rope_scaling": { "long_factor": [ 1.0299, 1.0499 ], "short_factor": [ 1.05, 1.05 ], "type": "su" }, "rope_theta": 10000.0, "sliding_window": 131072, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.42.1", "use_cache": true, "vocab_size": 32064 }