nblinh63 committed (verified)
Commit ff6b41d · 1 Parent(s): 3fe4e0d

End of training

README.md CHANGED
@@ -105,7 +105,7 @@ xformers_attention: true
 
 This model is a fine-tuned version of [openlm-research/open_llama_3b](https://huggingface.co/openlm-research/open_llama_3b) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 3.8126
+- Loss: 3.8115
 
 ## Model description
 
@@ -137,7 +137,7 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:------:|:----:|:---------------:|
-| 4.4013        | 0.0002 | 10   | 3.8126          |
+| 4.5942        | 0.0002 | 10   | 3.8115          |
 
 
 ### Framework versions
adapter_config.json CHANGED
@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
-    "down_proj",
-    "q_proj",
-    "gate_proj",
+    "k_proj",
     "v_proj",
     "up_proj",
-    "k_proj"
+    "q_proj",
+    "gate_proj",
+    "down_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cb082aff772c33d961204a180dafea9c358faadc0381d1f77f48dac5c5bfbb44
+oid sha256:b9fe8aa2ae95814df1f7d249a9f6e5982ddd2cd74a5576cce2de853a54ec205e
 size 101834682
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1cf18823b417f9fd583037d8c61abe93adbec260a1295cea794603f0629a5cbb
+oid sha256:9211a8620b45592ba317d9694fc20e70aaad84572ae76cb7eb9b97b525a9d4f2
 size 101752088
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:14f5fe38cf0db07db6dd3e88fc90e63160bab952b33e1ba62b35683431cf5712
+oid sha256:5af9315b48d12d2e085b69aa1ffdc3773f47d9c8b157a1c601b3170d55e75d6f
 size 6776