apepkuss79 committed on
Commit
58f7d47
·
verified ·
1 Parent(s): c8bf68e

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +12 -4
README.md CHANGED
@@ -28,12 +28,20 @@ library_name: transformers
28
 
29
  - Prompt template
30
 
31
- - Prompt type: `deepseek-chat-25`
32
 
33
  - Prompt string
34
 
35
  ```text
36
- <|begin_of_sentence|>{system_message}<|User|>{user_message_1}<|Assistant|>{assistant_message_1}<|end_of_sentence|><|User|>{user_message_2}<|Assistant|>
 
 
 
 
 
 
 
 
37
  ```
38
 
39
  - Context size: `128000`
@@ -43,7 +51,7 @@ library_name: transformers
43
  ```bash
44
  wasmedge --dir .:. --nn-preload default:GGML:AUTO:DeepSeek-R1-Distill-Llama-8B-Q5_K_M.gguf \
45
  llama-api-server.wasm \
46
- --prompt-template deepseek-chat-25 \
47
  --ctx-size 128000 \
48
  --model-name DeepSeek-R1-Distill-Llama-8B
49
  ```
@@ -53,7 +61,7 @@ library_name: transformers
53
  ```bash
54
  wasmedge --dir .:. --nn-preload default:GGML:AUTO:DeepSeek-R1-Distill-Llama-8B-Q5_K_M.gguf \
55
  llama-chat.wasm \
56
- --prompt-template deepseek-chat-25 \
57
  --ctx-size 128000
58
  ```
59
 
 
28
 
29
  - Prompt template
30
 
31
+ - Prompt type: `llama-3-chat`
32
 
33
  - Prompt string
34
 
35
  ```text
36
+ <|begin_of_text|><|start_header_id|>system<|end_header_id|>
37
+
38
+ {{ system_prompt }}<|eot_id|><|start_header_id|>user<|end_header_id|>
39
+
40
+ {{ user_message_1 }}<|eot_id|><|start_header_id|>assistant<|end_header_id|>
41
+
42
+ {{ model_answer_1 }}<|eot_id|><|start_header_id|>user<|end_header_id|>
43
+
44
+ {{ user_message_2 }}<|eot_id|><|start_header_id|>assistant<|end_header_id|>
45
  ```
46
 
47
  - Context size: `128000`
 
51
  ```bash
52
  wasmedge --dir .:. --nn-preload default:GGML:AUTO:DeepSeek-R1-Distill-Llama-8B-Q5_K_M.gguf \
53
  llama-api-server.wasm \
54
+ --prompt-template llama-3-chat \
55
  --ctx-size 128000 \
56
  --model-name DeepSeek-R1-Distill-Llama-8B
57
  ```
 
61
  ```bash
62
  wasmedge --dir .:. --nn-preload default:GGML:AUTO:DeepSeek-R1-Distill-Llama-8B-Q5_K_M.gguf \
63
  llama-chat.wasm \
64
+ --prompt-template llama-3-chat \
65
  --ctx-size 128000
66
  ```
67