shaikehsan committed
Commit 82e138b · verified · 1 Parent(s): 4de5a49

Update config.json

Files changed (1)
  1. config.json +34 -33
config.json CHANGED
@@ -42,37 +42,38 @@
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 152064,
-
-  "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "unsloth/qwen2.5-coder-14b-instruct-bnb-4bit",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layer_replication": null,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "loftq_config": {},
-  "lora_alpha": 16,
-  "lora_dropout": 0,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 16,
-  "rank_pattern": {},
-  "revision": null,
-  "target_modules": [
-    "o_proj",
-    "v_proj",
-    "down_proj",
-    "up_proj",
-    "q_proj",
-    "gate_proj",
-    "k_proj"
-  ],
-  "task_type": "CAUSAL_LM",
-  "use_dora": false,
-  "use_rslora": false
+
+  "adapter_config": {
+    "alpha_pattern": {},
+    "auto_mapping": null,
+    "bias": "none",
+    "fan_in_fan_out": false,
+    "inference_mode": true,
+    "init_lora_weights": true,
+    "layer_replication": null,
+    "layers_pattern": null,
+    "layers_to_transform": null,
+    "loftq_config": {},
+    "lora_alpha": 16,
+    "lora_dropout": 0,
+    "megatron_config": null,
+    "megatron_core": "megatron.core",
+    "modules_to_save": null,
+    "peft_type": "LORA",
+    "r": 16,
+    "rank_pattern": {},
+    "revision": null,
+    "target_modules": [
+      "o_proj",
+      "v_proj",
+      "down_proj",
+      "up_proj",
+      "q_proj",
+      "gate_proj",
+      "k_proj"
+    ],
+    "task_type": "CAUSAL_LM",
+    "use_dora": false,
+    "use_rslora": false
+  }
 }
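
The keys grouped under "adapter_config" are standard PEFT LoRA adapter settings (rank 16, alpha 16, no dropout, LoRA applied to the attention and MLP projection layers); the commit also drops the old top-level "base_model_name_or_path" entry. As a rough illustration only, not part of the commit, and assuming a recent peft release that supports use_rslora and use_dora, the same hyperparameters expressed as a peft.LoraConfig would look roughly like this:

# Hypothetical sketch: reproduce the adapter hyperparameters from the
# "adapter_config" block above as a PEFT LoraConfig object.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                      # LoRA rank ("r" in the config)
    lora_alpha=16,             # scaling factor ("lora_alpha")
    lora_dropout=0.0,          # "lora_dropout": 0
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[           # same projections as "target_modules"
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
    use_rslora=False,
    use_dora=False,
)
print(lora_config)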