joonavel committed on
Commit ebcbd64 · verified · 1 Parent(s): 9de3576

Update README.md

Files changed (1)
  1. README.md +47 -24
README.md CHANGED
@@ -43,6 +43,34 @@ This adapter was created through **instruction tuning**.
 <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
 To use this LoRA adapter, refer to the following code:
 
+ ### Load Adapter
+
+ ```
+ from transformers import BitsAndBytesConfig
+
+ def get_bnb_config(bit=8):
+     if bit == 8:
+         return BitsAndBytesConfig(load_in_8bit=True)
+     else:
+         print(f"You passed bit={bit}. For any value other than 8, the 4-bit config is returned.")
+         return BitsAndBytesConfig(load_in_4bit=True)
+ ```
+
+ ```
+ from unsloth import FastLanguageModel
+
+ model_name = "unsloth/Qwen2.5-Coder-32B-Instruct"
+ adapter_revision = "checkpoint-200"  # checkpoint-100 ~ checkpoint-350, or main (which is checkpoint-384)
+
+ bnb_config = get_bnb_config(bit=8)
+ model, tokenizer = FastLanguageModel.from_pretrained(
+     model_name=model_name,
+     dtype=None,
+     quantization_config=bnb_config,
+ )
+ model.load_adapter("100suping/Qwen2.5-Coder-34B-Instruct-kosql-adapter", revision=adapter_revision)
+ ```
+
 ### Prompt
 
 <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
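For reference (not part of the README diff): once the adapter is loaded as in the snippet added above, generation could look like the following minimal sketch. It assumes the `model` and `tokenizer` from that snippet, the `GENERAL_QUERY_PREFIX` and `GENERATE_QUERY_INSTRUCTIONS` constants from the Prompt section, and a made-up schema and question.

```
from unsloth import FastLanguageModel

FastLanguageModel.for_inference(model)  # switch the unsloth model to inference mode

# Hypothetical inputs, for illustration only.
schema = "CREATE TABLE users (id INT, name VARCHAR(100), created_at DATE);"
question = "2024년에 가입한 사용자들의 이름을 보여줘."

messages = [
    {"role": "system", "content": GENERAL_QUERY_PREFIX.format(context=schema) + GENERATE_QUERY_INSTRUCTIONS},
    {"role": "user", "content": question},
]
input_ids = tokenizer.apply_chat_template(
    messages, tokenize=True, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

output_ids = model.generate(input_ids=input_ids, max_new_tokens=256)
# Decode only the newly generated tokens, i.e. the fenced SQL answer.
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```

Because the training data wraps every assistant turn in a fenced sql block (see the preprocess functions below), downstream code may need to strip that fence before executing the generated query.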
 
@@ -60,29 +88,6 @@ GENERATE_QUERY_INSTRUCTIONS = """
 """
 ```
 
- ### Preprocess Functions
-
- ```
- def get_conversation_data(examples):
-     questions = examples['question']
-     schemas = examples['schema']
-     sql_queries = examples['SQL']
-     convos = []
-     for question, schema, sql in zip(questions, schemas, sql_queries):
-         conv = [
-             {"role": "system", "content": GENERAL_QUERY_PREFIX.format(context=schema) + GENERATE_QUERY_INSTRUCTIONS},
-             {"role": "user", "content": question},
-             {"role": "assistant", "content": "```sql\n" + sql + ";\n```"},
-         ]
-         convos.append(conv)
-     return {"conversation": convos}
-
- def formatting_prompts_func(examples):
-     convos = examples["conversation"]
-     texts = [tokenizer.apply_chat_template(convo, tokenize=False, add_generation_prompt=False) for convo in convos]
-     return {"text": texts}
- ```
-
 ### Example input
 
 ```
@@ -164,9 +169,27 @@ Users (both direct and downstream) should be made aware of the risks, biases and
 ```
 ```
 
- #### Preprocessing [optional]
+ ### Preprocess Functions
 
 ```
+ def get_conversation_data(examples):
+     questions = examples['question']
+     schemas = examples['schema']
+     sql_queries = examples['SQL']
+     convos = []
+     for question, schema, sql in zip(questions, schemas, sql_queries):
+         conv = [
+             {"role": "system", "content": GENERAL_QUERY_PREFIX.format(context=schema) + GENERATE_QUERY_INSTRUCTIONS},
+             {"role": "user", "content": question},
+             {"role": "assistant", "content": "```sql\n" + sql + ";\n```"},
+         ]
+         convos.append(conv)
+     return {"conversation": convos}
+
+ def formatting_prompts_func(examples):
+     convos = examples["conversation"]
+     texts = [tokenizer.apply_chat_template(convo, tokenize=False, add_generation_prompt=False) for convo in convos]
+     return {"text": texts}
 ```
 
  #### Training Hyperparameters
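The `get_conversation_data` and `formatting_prompts_func` functions moved above are written for batched `datasets` mapping (each receives whole columns as lists). A minimal usage sketch, not from the README: the dataset id is a placeholder, the `question` / `schema` / `SQL` columns are the ones the functions expect, and `tokenizer` is the one loaded earlier.

```
from datasets import load_dataset

# Placeholder dataset id; any text-to-SQL dataset with 'question', 'schema', and 'SQL' columns works.
dataset = load_dataset("your-org/your-korean-text2sql-dataset", split="train")

# batched=True passes columns as lists, matching how both functions index `examples`.
dataset = dataset.map(get_conversation_data, batched=True)
dataset = dataset.map(formatting_prompts_func, batched=True)

print(dataset[0]["text"])  # a chat-templated training example ending in the fenced SQL answer
```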