{
  "model_id": "llama_3.2_1b",
  "model_arch": "llama",
  "version": 20241202,
  "llm_config": {
    "embed_dim": 2048,
    "ffn_dim": 8192,
    "head_size": 64,
    "kv_dim": 512,
    "n_attn_heads": 32,
    "n_attn_kv_heads": 8,
    "n_ctx": 131072,
    "n_layers": 16,
    "norm_eps": 9.999999747378752e-06,
    "vocab_size": 128256,
    "rope_config": {
      "n_rope_ctx_orig": 131072,
      "rope_attn_factor": 1.0,
      "rope_dim": 64,
      "rope_freq_base": 500000.0,
      "rope_freq_scale": 1.0,
      "rope_scale_type": "linear",
      "rope_type": 0
    }
  }
}
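
The attention fields in this config are internally consistent: 32 query heads of size 64 cover the 2048-dim embedding, and with grouped-query attention the 8 KV heads give a KV projection of 8 × 64 = 512 (`kv_dim`). A minimal sanity-check sketch in Python, assuming the config is saved as `llama_3.2_1b.json` (the filename is an assumption, not from the source):

```python
import json

# Load the config shown above; the path is hypothetical.
with open("llama_3.2_1b.json") as f:
    cfg = json.load(f)["llm_config"]

# Per-head size: embed_dim is split evenly across the query heads.
assert cfg["embed_dim"] // cfg["n_attn_heads"] == cfg["head_size"]  # 2048 / 32 = 64

# Grouped-query attention: KV projection is n_attn_kv_heads * head_size.
assert cfg["n_attn_kv_heads"] * cfg["head_size"] == cfg["kv_dim"]   # 8 * 64 = 512

# RoPE is applied across the full head dimension here.
assert cfg["rope_config"]["rope_dim"] == cfg["head_size"]           # 64
```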