shaikehsan
committed on
Update config.json
config.json CHANGED (+34 -33)
@@ -42,37 +42,38 @@
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 152064,
-  … (old lines 45–77 removed; their content is not shown in the rendered diff)
+
+  "adapter_config": {
+    "alpha_pattern": {},
+    "auto_mapping": null,
+    "bias": "none",
+    "fan_in_fan_out": false,
+    "inference_mode": true,
+    "init_lora_weights": true,
+    "layer_replication": null,
+    "layers_pattern": null,
+    "layers_to_transform": null,
+    "loftq_config": {},
+    "lora_alpha": 16,
+    "lora_dropout": 0,
+    "megatron_config": null,
+    "megatron_core": "megatron.core",
+    "modules_to_save": null,
+    "peft_type": "LORA",
+    "r": 16,
+    "rank_pattern": {},
+    "revision": null,
+    "target_modules": [
+      "o_proj",
+      "v_proj",
+      "down_proj",
+      "up_proj",
+      "q_proj",
+      "gate_proj",
+      "k_proj"
+    ],
+    "task_type": "CAUSAL_LM",
+    "use_dora": false,
+    "use_rslora": false
+  }
 }
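The added "adapter_config" block carries the same fields that the peft library serializes for a LoRA adapter (rank 16, alpha 16, no dropout, all attention and MLP projections as target modules). As a minimal sketch under that assumption, the snippet below is not part of this commit; it only shows an equivalent LoraConfig built with peft's public API.

# Sketch only (assumes the standard peft library); mirrors the values added in config.json.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                   # matches "r": 16
    lora_alpha=16,          # matches "lora_alpha": 16
    lora_dropout=0.0,       # matches "lora_dropout": 0
    bias="none",            # matches "bias": "none"
    task_type="CAUSAL_LM",  # matches "task_type": "CAUSAL_LM"
    target_modules=[
        "o_proj", "v_proj", "down_proj", "up_proj",
        "q_proj", "gate_proj", "k_proj",
    ],
    use_rslora=False,       # matches "use_rslora": false
    use_dora=False,         # matches "use_dora": false
)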