finetuned_mambavl_790m / config.json
{
  "architectures": [
    "VLMambaLMHeadModel"
  ],
  "d_model": 1536,
  "fused_add_norm": true,
  "hidden_size": 1536,
  "image_size": 224,
  "n_layer": 48,
  "num_channels": 3,
  "pad_vocab_size_multiple": 8,
  "patch_size": 32,
  "residual_in_fp32": true,
  "rms_norm": true,
  "ssm_cfg": {},
  "torch_dtype": "bfloat16",
  "transformers_version": "4.37.1",
  "vocab_size": 50280
}
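
For reference, a minimal sketch of downloading and inspecting this config with huggingface_hub. The repo id "gpantaz/finetuned_mambavl_790m" is inferred from this page and is an assumption; VLMambaLMHeadModel is a custom class rather than a stock transformers architecture, so only the raw JSON is parsed here.

import json

from huggingface_hub import hf_hub_download

# Fetch config.json from the Hub (repo id assumed from the page above).
config_path = hf_hub_download(
    repo_id="gpantaz/finetuned_mambavl_790m",
    filename="config.json",
)
with open(config_path) as f:
    config = json.load(f)

print(config["architectures"])               # ['VLMambaLMHeadModel']
print(config["d_model"], config["n_layer"])  # 1536 48

# With image_size 224 and patch_size 32, a square patch grid gives
# (224 // 32) ** 2 = 49 image patches per input image.
num_patches = (config["image_size"] // config["patch_size"]) ** 2
print(num_patches)  # 49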