Wei-K committed on
Commit
3896d32
·
1 Parent(s): 253bff8

Training in progress, step 100

Browse files
adapter_config.json CHANGED
@@ -19,8 +19,8 @@
19
  "rank_pattern": {},
20
  "revision": null,
21
  "target_modules": [
22
- "v_proj",
23
- "q_proj"
24
  ],
25
  "task_type": "CAUSAL_LM",
26
  "use_rslora": false
 
19
  "rank_pattern": {},
20
  "revision": null,
21
  "target_modules": [
22
+ "q_proj",
23
+ "v_proj"
24
  ],
25
  "task_type": "CAUSAL_LM",
26
  "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:cee4a6062e149ecc602c7f8cb050e16cfac105b049761a1cab07bc4d22b5ca0d
3
  size 16794200
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:474779b0424474560db61e437bb35e831450641e2af20229d960bb7ec87dfd8f
3
  size 16794200
runs/Dec29_06-19-36_d0b4371a9f44/events.out.tfevents.1703832401.d0b4371a9f44.7509.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a568f498c14c5bc8e6f3bd16b8fe735c170b72680f0f88e9cffffa067aef3df0
3
+ size 5246
runs/Dec29_07-28-45_d0b4371a9f44/events.out.tfevents.1703835222.d0b4371a9f44.27784.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:236f21954a978d327615c8073e1347e93a926e8d96ed6e1273ba524a312b4ee2
3
+ size 4784
runs/Dec29_07-28-45_d0b4371a9f44/events.out.tfevents.1703835734.d0b4371a9f44.27784.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:da738d168c28a1f18337c237d7780647c3950576c6c4c6273dc9790152be03a7
3
+ size 4784
runs/Dec29_07-47-41_d0b4371a9f44/events.out.tfevents.1703836214.d0b4371a9f44.32567.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:eaad181fbbfde93b7d8d0aca670ec087cc4704ed0e60d06ca4a2536e986a9187
3
+ size 4784
runs/Dec29_08-02-46_d0b4371a9f44/events.out.tfevents.1703837106.d0b4371a9f44.36448.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7c8c8de311fb1d42375ba8c0dd58fccb750b71a9043f62a4a42b45a306e41bbb
3
+ size 6672
tokenizer_config.json CHANGED
@@ -32,12 +32,16 @@
32
  "clean_up_tokenization_spaces": false,
33
  "eos_token": "</s>",
34
  "legacy": false,
 
35
  "model_max_length": 1000000000000000019884624838656,
36
  "pad_token": "</s>",
37
  "padding": true,
38
  "padding_side": "right",
39
  "sp_model_kwargs": {},
 
40
  "tokenizer_class": "LlamaTokenizer",
 
 
41
  "unk_token": "<unk>",
42
  "use_default_system_prompt": false
43
  }
 
32
  "clean_up_tokenization_spaces": false,
33
  "eos_token": "</s>",
34
  "legacy": false,
35
+ "max_length": 384,
36
  "model_max_length": 1000000000000000019884624838656,
37
  "pad_token": "</s>",
38
  "padding": true,
39
  "padding_side": "right",
40
  "sp_model_kwargs": {},
41
+ "stride": 0,
42
  "tokenizer_class": "LlamaTokenizer",
43
+ "truncation_side": "right",
44
+ "truncation_strategy": "longest_first",
45
  "unk_token": "<unk>",
46
  "use_default_system_prompt": false
47
  }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e68a095d2f7edb60135d6d9fe26d77cfac09d174b01ffc68fd4cb8a1685268f1
3
  size 4728
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3a3a4ada5a4bb48c73d15512fe6b941fd5ede57a559baaccedce59fe40f4e396
3
  size 4728