gwkrsrch committed (verified)
Commit 3946036 · Parent: e571773

Update config.json

Files changed (1): config.json (+1 −2)
config.json CHANGED
@@ -6,8 +6,6 @@
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 1,
- "cl_alpha": 0.1,
- "cl_select_layer": null,
  "eos_token_id": 2,
  "freeze_mm_mlp_adapter": false,
  "hidden_act": "silu",
@@ -31,6 +29,7 @@
  "num_attention_heads": 32,
  "num_hidden_layers": 22,
  "num_key_value_heads": 4,
+ "pad_token_id": 0,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,