File size: 110 Bytes
{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "model_type": "llama",
  "vocab_size": 128256
}
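
This minimal config.json declares a Llama-architecture causal language model with a 128,256-token vocabulary. As a rough sketch, assuming the checkpoint is meant to be consumed with the Hugging Face transformers library and using a hypothetical local path, the file can be loaded like this; keys missing from the file fall back to LlamaConfig defaults:

from transformers import AutoConfig

# Read config.json from the (hypothetical) local repository directory.
config = AutoConfig.from_pretrained("path/to/model-repo")

print(config.model_type)      # "llama"
print(config.vocab_size)      # 128256
print(config.architectures)   # ["LlamaForCausalLM"]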