{
  "model_type": "llama",
  "hidden_size": 4096, 
  "num_attention_heads": 32,  
  "num_hidden_layers": 16,  
  "intermediate_size": 11008,  
  "hidden_act": "silu",  
  "initializer_range": 0.02,  
  "layer_norm_eps": 1e-5,  
  "vocab_size": 32000,  
  "max_position_embeddings": 2048,  
  "rms_norm_eps": 1e-6,  
  "tie_word_embeddings": false,  
  "use_cache": true,  
  "pad_token_id": 0,  
  "bos_token_id": 1,  
  "eos_token_id": 2,  
  "flash_norm_params": {
    "scaling_factor": 0.5,  
    "use_batch_norm": false  
  }
}