8Spark committed · verified
Commit 768bb0c · Parent: 6c2b141

ai-maker-space/llama3181binstruct_translate_v1

README.md CHANGED
@@ -27,7 +27,7 @@ print(output["generated_text"])
 
 ## Training procedure
 
-[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/8spark/huggingface/runs/few9af5k)
+
 
 This model was trained with SFT.
 
@@ -35,7 +35,7 @@ This model was trained with SFT.
 
 - TRL: 0.12.2
 - Transformers: 4.46.3
-- Pytorch: 2.5.1+cu121
+- Pytorch: 2.5.1+cu118
 - Datasets: 3.2.0
 - Tokenizers: 0.20.3
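For context, `print(output["generated_text"])` in the first hunk header is the tail of the model card's quick-start snippet. A minimal sketch of what that TRL-style usage presumably looks like; the prompt and generation settings are illustrative assumptions, not taken from the card:

```python
from transformers import pipeline

# Illustrative prompt; the repo name suggests a translation fine-tune.
# Loading the adapter repo directly requires peft to be installed, so the
# pipeline can resolve the base model from adapter_config.json.
question = "Translate to French: Hello, how are you?"
generator = pipeline(
    "text-generation",
    model="ai-maker-space/llama3181binstruct_translate_v1",
    device="cuda",
)
output = generator(
    [{"role": "user", "content": question}],
    max_new_tokens=128,
    return_full_text=False,
)[0]
print(output["generated_text"])
```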
adapter_config.json CHANGED
@@ -1,37 +1,37 @@
-{
-  "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "meta-llama/Llama-3.1-8B-Instruct",
-  "bias": "none",
-  "eva_config": null,
-  "exclude_modules": null,
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layer_replication": null,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "loftq_config": {},
-  "lora_alpha": 32,
-  "lora_bias": false,
-  "lora_dropout": 0.1,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 16,
-  "rank_pattern": {},
-  "revision": null,
-  "target_modules": [
-    "o_proj",
-    "v_proj",
-    "up_proj",
-    "k_proj",
-    "down_proj",
-    "gate_proj",
-    "q_proj"
-  ],
-  "task_type": "CAUSAL_LM",
-  "use_dora": false,
-  "use_rslora": false
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "meta-llama/Llama-3.1-8B-Instruct",
+  "bias": "none",
+  "eva_config": null,
+  "exclude_modules": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layer_replication": null,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 32,
+  "lora_bias": false,
+  "lora_dropout": 0.1,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 16,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "o_proj",
+    "q_proj",
+    "k_proj",
+    "gate_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj"
+  ],
+  "task_type": "CAUSAL_LM",
+  "use_dora": false,
+  "use_rslora": false
 }
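Only the serialized order of `target_modules` differs between the two versions; peft holds the target modules as a set, so the reordering carries no semantic change. A sketch of the `peft.LoraConfig` that would serialize to roughly this JSON (the remaining keys are peft defaults written at save time):

```python
from peft import LoraConfig

config = LoraConfig(
    r=16,                # LoRA rank, "r" in the JSON
    lora_alpha=32,       # scaling factor; effective scale = lora_alpha / r = 2.0
    lora_dropout=0.1,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[     # all attention and MLP projections of the Llama block
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
)
```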
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2d68e60f1785171d40a127f907171593bb136c3a119c3086846ada8188582042
+oid sha256:b2169bfcaff2c7c055f6c2f9a72f9eb3a42d5bf631a2d63b14491059f7ddb7a1
 size 167832240
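Only the LFS pointer's oid changed; the byte size is identical, as expected for retrained LoRA weights of the same shapes. A small sketch (local file path is an assumption) for verifying a downloaded file against the new pointer:

```python
import hashlib

def lfs_digest(path, chunk_size=1 << 20):
    """Stream the file and return (sha256 hexdigest, size in bytes)."""
    h, size = hashlib.sha256(), 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest(), size

digest, size = lfs_digest("adapter_model.safetensors")
assert digest == "b2169bfcaff2c7c055f6c2f9a72f9eb3a42d5bf631a2d63b14491059f7ddb7a1"
assert size == 167832240
```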
special_tokens_map.json CHANGED
@@ -1,17 +1,17 @@
-{
-  "bos_token": {
-    "content": "<|begin_of_text|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|eot_id|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "<|eot_id|>"
-}
+{
+  "bos_token": {
+    "content": "<|begin_of_text|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|eot_id|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<|eot_id|>"
+}
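The token map itself is unchanged between the two sides (the diff reflects a full file rewrite, likely whitespace only). Note that `pad_token` reuses `<|eot_id|>`, a common choice for Llama-3.1 fine-tunes since the base tokenizer defines no pad token. A quick check, assuming the repo id above:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("ai-maker-space/llama3181binstruct_translate_v1")
print(tok.bos_token)   # <|begin_of_text|>
print(tok.eos_token)   # <|eot_id|>
print(tok.pad_token)   # <|eot_id|>, shared with eos
assert tok.pad_token_id == tok.eos_token_id
```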
tokenizer_config.json CHANGED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cf89cfcb413406ebe7642c88bfd71f618ced59137e65bf38cca4a7e0dd2153a6
-size 5624
+oid sha256:6dc98c234af0f9529780508363207e48e6a054e92d89cf1b782d320a05e73e02
+size 5560
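training_args.bin is a pickled transformers `TrainingArguments` object (with TRL's SFT trainer, typically an `SFTConfig` subclass); the size change suggests a tweaked training configuration. A sketch for inspecting it locally, keeping in mind that unpickling a file you don't trust is unsafe:

```python
import torch

# weights_only must be disabled because this is a pickled dataclass, not a
# tensor checkpoint; only do this for files from a trusted source.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)                        # e.g. SFTConfig
print(args.learning_rate, args.num_train_epochs)  # standard TrainingArguments fields
```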