vxbrandon · End of training · 628157b verified
{
  "_name_or_path": "meta-llama/Llama-2-7b-hf",
  "architectures": [
    "SparseLlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "ugly_utils.SparseLlamaConfig",
    "AutoModelForCausalLM": "ugly_utils.SparseLlamaForCausalLM"
  },
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 4096,
  "model_type": "sparse_llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "thresholds": [
    0.021063182502985,
    0.029087254777550697,
    0.03309928998351097,
    0.04513540118932724,
    0.061183542013168335,
    0.07723169028759003,
    0.08324974030256271,
    0.09127381443977356,
    0.08726178109645844,
    0.09127381443977356,
    0.09729187190532684,
    0.09528584778308868,
    0.0992978885769844,
    0.10531593859195709,
    0.10732196271419525,
    0.11334001272916794,
    0.11935807019472122,
    0.11735205352306366,
    0.12136408686637878,
    0.11133399605751038,
    0.10531593859195709,
    0.10130390524864197,
    0.10130390524864197,
    0.10330992192029953,
    0.10531593859195709,
    0.11133399605751038,
    0.1153460294008255,
    0.12337010353803635,
    0.13340020179748535,
    0.14744232594966888,
    0.1574724167585373,
    0.15947842597961426
  ],
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.36.2",
  "us_sparse_regularization": false,
  "use_cache": false,
  "use_graceful_regularization": false,
  "use_relu": false,
  "use_sparse_model": true,
  "use_sparse_predictor": false,
  "use_sparse_regularization": false,
  "vocab_size": 32000
}
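
Because "model_type" is the custom "sparse_llama" and "auto_map" routes the Auto* classes to SparseLlamaConfig / SparseLlamaForCausalLM in the repo's ugly_utils.py, this checkpoint cannot be loaded with the stock LlamaForCausalLM class; it needs trust_remote_code=True. A minimal loading sketch follows, assuming a hypothetical repo id (substitute the actual repository path):

import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "vxbrandon/sparse-llama-2-7b"  # hypothetical id -- replace with the real repo

# trust_remote_code=True is required so that the auto_map entries above can
# dispatch to ugly_utils.SparseLlamaConfig / ugly_utils.SparseLlamaForCausalLM.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in the config
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

# Extra keys in config.json become attributes on the loaded config object:
# one sparsification threshold per decoder layer (32 layers, 32 thresholds).
assert len(config.thresholds) == config.num_hidden_layers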
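
The "thresholds" array holds one value per layer, rising from roughly 0.021 at the first layer to 0.159 at the last, which points to magnitude-based activation sparsity in the SiLU MLP. The sketch below shows how such a per-layer cutoff could be applied; it is an assumption, not confirmed by this config, that the threshold gates the silu(gate_proj(x)) activations, and all function and parameter names here are illustrative:

import torch
import torch.nn.functional as F

def thresholded_mlp(x, gate_proj, up_proj, down_proj, threshold):
    # Hypothetical sketch: zero SiLU gate activations whose magnitude falls
    # below the layer's threshold. The surviving mask is what a sparse
    # kernel could use to skip the matching rows of up_proj/down_proj.
    gate = F.silu(gate_proj(x))
    mask = gate.abs() >= threshold
    return down_proj((gate * mask) * up_proj(x))

# Dimensions taken from the config: hidden_size 4096, intermediate_size 11008.
layer0 = dict(
    gate_proj=torch.nn.Linear(4096, 11008, bias=False),
    up_proj=torch.nn.Linear(4096, 11008, bias=False),
    down_proj=torch.nn.Linear(11008, 4096, bias=False),
)
x = torch.randn(1, 4096)
y = thresholded_mlp(x, threshold=0.021063182502985, **layer0)  # layer-0 threshold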