Sarah Ciston committed
Commit cf748ec · 1 Parent(s): 558b7ef

try with top_k roberta model

Files changed (2)
  1. README.md +1 -0
  2. sketch.js +37 -27
README.md CHANGED
@@ -12,6 +12,7 @@ hf_oauth_scopes:
   - inference-api
 models:
   - bert-base-uncased
+  - distilroberta-base
   # - gpt-3.5-turbo
   # - bigscience/bloom-560m
   # - Xenova/distilgpt2
sketch.js CHANGED
@@ -77,7 +77,7 @@ new p5(function (p5) {
   // promptInput.position(0,160)
   promptInput.size(600);
   promptInput.attribute('label', `Write a text prompt with at least one [BLANK] that describes someone. You can also write [FILL] where you want the bot to fill in a word on its own.`)
-  promptInput.value(`The [BLANK] works as a [MASK] but ...`)
+  promptInput.value(`The [BLANK] works as a [mask] but ...`)
   promptInput.addClass("prompt")
   p5.createP(promptInput.attribute('label'))
   // .position(0,100)
@@ -158,7 +158,6 @@ new p5(function (p5) {
   // // Please return an array of sentences based on the sample sentence to follow. In each sentence,
 
   // // let modelResult = await runModel(PREPROMPT, PROMPT)
-  // let modelResult = await runModel(BLANKSVALUES, PROMPT)
 
   // await displayModel(modelResult)
   // }
@@ -166,33 +165,36 @@ new p5(function (p5) {
   // creating multiple prompt inputs rather than instructing model to do so
   async function getInputs(){
   // Map the list of blanks text values to a new list
-  let BLANKSVALUES = blanksArray.map(i => i.value())
-  console.log(BLANKSVALUES)
+  // let BLANKSVALUES = blanksArray.map(i => i.value())
+  // console.log(BLANKSVALUES)
 
   // Do model stuff in this function instead of in general
   let PROMPT = promptInput.value() // updated check of the prompt field
 
   // BLANKS = inputValues // get ready to feed array list into model
 
-  let PROMPTS = []
-  for (let b in BLANKSVALUES){
-  console.log(BLANKSVALUES[b])
-  let p = PROMPT.replace('[BLANK]', `${BLANKSVALUES[b]}`)
-  console.log(p)
-  PROMPTS.push(p)
-  }
-  console.log(PROMPTS)
+  // for running MULTIPLE PROMPTS AT ONCE
+  // let PROMPTS = []
+  // for (let b in BLANKSVALUES){
+  // console.log(BLANKSVALUES[b])
+  // let p = PROMPT.replace('[BLANK]', `${BLANKSVALUES[b]}`)
+  // console.log(p)
+  // PROMPTS.push(p)
+  // }
+  // console.log(PROMPTS)
 
   // let PREPROMPT = `In the sentence I provide, please fill in the [BLANK] with each word in the array ${BLANKSVALUES}, replace any [MASK] with a word of your choice. Here is the SAMPLE SENTENCE: `
 
   // we pass PROMPT and PREPROMPT to the model function, don't need to pass BLANKSVALUES bc it's passed into the PREPROMPT already here
 
   // Please return an array of sentences based on the sample sentence to follow. In each sentence,
-
+
+  let modelResult = await runModel(PROMPT)
   // let modelResult = await runModel(PREPROMPT, PROMPT)
-  let modelResult = await runModel(PROMPTS)
-
-  await displayModel(resultsArray[0], resultsArray[1])
+  // let modelResult = await runModel(PROMPTS)
+
+  await displayModel(modelResult)
+  // await displayModel(resultsArray[0], resultsArray[1])
   }
 
   async function displayModel(m){
@@ -346,20 +348,28 @@ new p5(function (p5) {
   // }
 
 
-  async function runModel(PROMPTS){
+  // async function runModel(PROMPTS){
+  async function runModel(PROMPT){
 
-  let MODELNAME = "bert-base-uncased"
+  // let MODELNAME = "bert-base-uncased"
+  let MODELNAME = 'distilroberta-base'
 
-  let unmasker = await pipeline('fill-mask', 'bert-base-uncased')
+  let unmasker = await pipeline('fill-mask', MODELNAME)
 
-  for (let p in PROMPTS){
-  var res = unmasker(p)
-  console.log(res)
+  let res = unmasker(PROMPT, top_k=5)
 
-  var modelResult = res[0].token_str
-  console.log(modelResult)
+  var modelResult = res
 
-  resultsArray.push(modelResult)
-  }
-  return resultsArray
+  return modelResult
+
+  // for (let p in PROMPTS){
+  // var res = unmasker(p)
+  // console.log(res)
+
+  // var modelResult = res[0].token_str
+  // console.log(modelResult)
+
+  // resultsArray.push(modelResult)
+  // }
+  // return resultsArray
   }
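
Note on the new runModel path: `pipeline('fill-mask', …)` matches the transformers.js API, so the sketch below is a minimal, hedged reconstruction of what the committed code presumably intends. It assumes `pipeline` is imported from transformers.js (@xenova/transformers v2); the package, mirror checkpoint, and option names are assumptions, not confirmed by this commit. Two details differ from the committed lines: the pipeline expects options as an object (the committed `unmasker(PROMPT, top_k=5)` is an assignment expression that just passes the number 5), and the pipeline call returns a Promise, so it needs `await`. RoBERTa-family checkpoints such as distilroberta-base also use `<mask>` as their mask token rather than `[MASK]`/`[mask]`, and transformers.js typically loads ONNX weights from a `Xenova/…` mirror of the model.

// Hedged sketch, assuming transformers.js v2 (@xenova/transformers); not the committed code.
import { pipeline } from '@xenova/transformers';

async function runModel(PROMPT) {
  // ONNX weights are usually pulled from a Xenova/* mirror of the checkpoint
  let unmasker = await pipeline('fill-mask', 'Xenova/distilroberta-base');

  // RoBERTa-style models expect the literal <mask> token in the prompt,
  // e.g. `The woman works as a <mask> but ...`
  // Options go in an object; the call must be awaited because it returns a Promise.
  let res = await unmasker(PROMPT, { topk: 5 }); // spelled top_k in transformers.js v3

  // res is an array of { sequence, score, token, token_str } candidates
  return res;
}

If it runs as intended, each element of the returned array carries one candidate fill for the mask, so displayModel(m) can read m[i].token_str (or m[i].sequence) for the five suggestions requested via top_k.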