vishwa2488 committed
Commit 8f13ea6 · verified · 1 Parent(s): a05b6b2

Training in progress, step 20

adapter_config.json CHANGED
@@ -20,8 +20,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5c3e1602c0ad559cf0c0c921464cd85d8011bc3b199c63e3ed3b777f4626ff8a
+oid sha256:6a3d60a60598814af7ad682cb8d2f7cf95368d65f14fbeb278a9366fb895fd08
 size 10107280
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:450a23576b6dc7d51dce2121a8d2b9f7bdad8025b59b124537353ff9d6cb3c97
+oid sha256:049e34cc6725e7377561afa0830a8df3444efffd68ce06d81346ae0d29b23097
 size 5624