danielhanchen committed
Commit 44f940b · verified · Parent: 2dd9863

Upload Gemma2ForCausalLM

Files changed (2):
  1. config.json (+4 -1)
  2. generation_config.json (+4 -1)
config.json CHANGED
@@ -8,7 +8,10 @@
   "attn_logit_softcapping": 50.0,
   "bos_token_id": 2,
   "cache_implementation": "hybrid",
-  "eos_token_id": 1,
+  "eos_token_id": [
+    1,
+    107
+  ],
   "final_logit_softcapping": 30.0,
   "head_dim": 256,
   "hidden_act": "gelu_pytorch_tanh",
generation_config.json CHANGED
@@ -2,7 +2,10 @@
   "_from_model_config": true,
   "bos_token_id": 2,
   "cache_implementation": "hybrid",
-  "eos_token_id": 1,
+  "eos_token_id": [
+    1,
+    107
+  ],
   "pad_token_id": 0,
   "transformers_version": "4.43.3"
 }
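
Because `generate()` reads `generation_config.json` by default, the new stop tokens take effect without passing `eos_token_id` explicitly. A quick check that the committed config carries both ids, again with an assumed model id:

    # Verify the committed generation config exposes both stop tokens.
    from transformers import GenerationConfig

    gen_config = GenerationConfig.from_pretrained("unsloth/gemma-2-9b-it")  # illustrative id
    print(gen_config.eos_token_id)  # expected: [1, 107]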