// import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/[email protected]';
import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/[email protected]/+esm';
const inference = new HfInference();
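// Note: without a Hugging Face access token the hosted Inference API is rate-limited, and
// gated models will refuse requests. A minimal sketch, assuming a (hypothetical) HF_TOKEN
// variable that holds your token, e.g. loaded from your Space's secrets:
// const inference = new HfInference(HF_TOKEN);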
// let pipe = await pipeline('text-generation', 'mistralai/Mistral-7B-Instruct-v0.2');
// list of models by task: 'https://huggingface.co/docs/transformers.js/index#supported-tasksmodels'
// Since we will download the model from the Hugging Face Hub, we can skip the local model check
// env.allowLocalModels = false;
///////// VARIABLES
let promptResult, maskAResult, maskBResult, maskCResult, promptButton, buttonButton, promptInput, maskInputA, maskInputB, maskInputC, modelDisplay, modelResult
// const detector = await pipeline('text-generation', 'meta-llama/Meta-Llama-3-8B', 'Xenova/LaMini-Flan-T5-783M');
let MODELNAME = 'Xenova/gpt-3.5-turbo'
// models('Xenova/gpt2', 'Xenova/gpt-3.5-turbo', 'mistralai/Mistral-7B-Instruct-v0.2', 'Xenova/llama-68m', 'meta-llama/Meta-Llama-3-8B', 'Xenova/bloom-560m', 'Xenova/distilgpt2')
// PREPROMPT is defined below in the MODEL STUFF section, after inputArray, so its template literal interpolates the actual words rather than undefined
///// p5 STUFF
new p5(function(p5){

  p5.setup = function(){
    console.log('p5 loaded')
    p5.noCanvas()
    makeInterface()
    // let canvas = p5.createCanvas(200,200)
    // canvas.position(300, 1000);
    // p5.background(200)
    // p5.textSize(20)
    // p5.textAlign(p5.CENTER,p5.CENTER)
  }

  p5.draw = function(){
    //
  }

  window.onload = function(){
    console.log('sketchfile loaded')
  }
  function makeInterface(){
    console.log('got to make interface')

    let title = p5.createElement('h1', 'p5.js Critical AI Prompt Battle')
    title.position(0,50)

    promptInput = p5.createInput("")
    promptInput.position(0,160)
    promptInput.size(500);
    promptInput.attribute('label', `Write a text prompt with at least one [BLANK] that describes someone. You can also write [FILL] where you want the bot to fill in a word.`)
    promptInput.value(`For example: "The [BLANK] has a job as a ...`)
    promptInput.elt.style.fontSize = "15px";
    p5.createP(promptInput.attribute('label')).position(0,100)
    // p5.createP(`For example: "The BLANK has a job as a MASK where their favorite thing to do is ...`)

    //make for loop to generate
    //make a button to make another
    //add them to the list of items

    maskInputA = p5.createInput("");
    maskInputA.position(0, 240);
    maskInputA.size(200);
    maskInputA.elt.style.fontSize = "15px";
    maskAResult = maskInputA.value() // captures the (empty) value at setup; re-read .value() when the button is pressed
    // maskInputA.changed() // .changed() needs a callback to do anything, e.g. maskInputA.changed(test)

    maskInputB = p5.createInput("");
    maskInputB.position(0, 270);
    maskInputB.size(200);
    maskInputB.elt.style.fontSize = "15px";
    maskBResult = maskInputB.value()

    maskInputC = p5.createInput("");
    maskInputC.position(0, 300);
    maskInputC.size(200);
    maskInputC.elt.style.fontSize = "15px";
    maskCResult = maskInputC.value()

    // modelDisplay = p5.createElement("p", "Results:");
    // modelDisplay.position(0, 380);
    // // setTimeout(() => {
    // modelDisplay.html(modelResult)
    // // }, 2000);

    //a model drop down list?

    //GO BUTTON
    promptButton = p5.createButton("GO")
    promptButton.position(0, 340);
    promptButton.elt.style.fontSize = "15px";
    promptButton.mousePressed(test)
    // promptInput.changed(test)
    // maskInputA.changed(test)
    // maskInputB.changed(test)
    // maskInputC.changed(test)

    // describe(``)
    // TO-DO alt-text description
  }
  function test(){
    console.log('did something')
    console.log(promptInput.value())
  }
  // var modelResult = promptButton.mousePressed(runModel) = function(){
  // // listens for the button to be clicked
  // // run the prompt through the model here
  // // modelResult = runModel()
  // // return modelResult
  // runModel()
  // }
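  // A minimal sketch of the handler this comment block describes, meant to replace test()
  // on the GO button (promptButton.mousePressed(displayResults)). It assumes the runModel()
  // and updatePromptFromInputs() sketches in the MODEL STUFF section below; the name
  // displayResults is not part of the original sketch.
  async function displayResults(){
    console.log('button pressed, querying model...')
    updatePromptFromInputs()                      // refresh PROMPT, inputArray, and PREPROMPT from the form
    let reply = await runModel()                  // run the prompt through the model
    modelDisplay = p5.createElement("p", reply)   // print the model's answer below the button
    modelDisplay.position(0, 380)
  }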
  // function makeInput(i){
  // i = p5.createInput("");
  // i.position(0, 300); //append to last input and move buttons down
  // i.size(200);
  // i.elt.style.fontSize = "15px";
  // }
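  // A minimal sketch of the makeInput() helper the comment block above describes: create one
  // extra input per call and stack it below the existing ones. The index argument and the
  // 300 + n * 30 spacing are assumptions chosen to match the hard-coded positions earlier in
  // makeInterface(); moving the GO button down to make room is still a to-do.
  function makeInput(n){
    let input = p5.createInput("");
    input.position(0, 300 + n * 30); // stack each new input 30px below the previous one
    input.size(200);
    input.elt.style.fontSize = "15px";
    return input
  }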
});
///// MODEL STUFF
var PROMPT = `The [BLANK] works as a [FILL] but wishes for [FILL].`
// /// this needs to run on button click, use string variables to fill in the form
// var PROMPT = `${promptResult}`
var inputArray = ["mother", "father", "sister", "brother"]
// // for num of inputs put in list
// var inputArray = [`${maskAResult}`, `${maskBResult}`, `${maskCResult}`]

// PREPROMPT is built here, after inputArray, so the template literal interpolates the actual
// words rather than undefined
var PREPROMPT = `Return an array of sentences. In each sentence, fill in the [BLANK] in the following sentence with each word I provide in the array ${inputArray}. Replace any [FILL] with an appropriate word of your choice.`
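// A minimal sketch of the button-click step the comments above describe: re-read the current
// field values into the string variables and rebuild inputArray and PREPROMPT from them.
// The function name updatePromptFromInputs is an assumption (not in the original sketch),
// and it only works after makeInterface() has created promptInput and the three mask inputs.
function updatePromptFromInputs(){
  promptResult = promptInput.value()
  maskAResult = maskInputA.value()
  maskBResult = maskInputB.value()
  maskCResult = maskInputC.value()
  PROMPT = `${promptResult}`
  inputArray = [maskAResult, maskBResult, maskCResult]
  PREPROMPT = `Return an array of sentences. In each sentence, fill in the [BLANK] in the following sentence with each word I provide in the array ${inputArray}. Replace any [FILL] with an appropriate word of your choice.`
}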
// async function runModel(){
// // Chat completion API
// const out = await inference.chatCompletion({
// model: MODELNAME,
// // model: "google/gemma-2-9b",
// messages: [{ role: "user", content: PREPROMPT + PROMPT }],
// max_tokens: 100
// });
// // let out = await pipe(PREPROMPT + PROMPT)
// // let out = await pipe(PREPROMPT + PROMPT, {
// // max_new_tokens: 250,
// // temperature: 0.9,
// // // return_full_text: False,
// // repetition_penalty: 1.5,
// // // no_repeat_ngram_size: 2,
// // // num_beams: 2,
// // num_return_sequences: 1
// // });
// console.log(out)
// var modelResult = await out.choices[0].message.content
// // var modelResult = await out[0].generated_text
// console.log(modelResult);
// return modelResult
// }
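// A runnable version of the chat-completion call drafted in the comments above: same
// HfInference chatCompletion method, model, prompt, and token limit. Whether MODELNAME is
// actually served by the hosted Inference API is untested here, so treat the model choice
// as an assumption you may need to swap for another chat model.
async function runModel(){
  const out = await inference.chatCompletion({
    model: MODELNAME,
    messages: [{ role: "user", content: PREPROMPT + PROMPT }],
    max_tokens: 100
  });
  console.log(out)
  // the generated reply is the first choice's message content
  let modelResult = out.choices[0].message.content
  console.log(modelResult);
  return modelResult
}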
// Reference the elements that we will need
// const status = document.getElementById('status');
// const fileUpload = document.getElementById('upload');
// const imageContainer = document.getElementById('container');
// const example = document.getElementById('example');
// const EXAMPLE_URL = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/city-streets.jpg';
// Create a new object detection pipeline
// status.textContent = 'Loading model...';
// const detector = await pipeline('object-detection', 'Xenova/detr-resnet-50');
// status.textContent = 'Ready';
// example.addEventListener('click', (e) => {
// e.preventDefault();
// detect(EXAMPLE_URL);
// });
// fileUpload.addEventListener('change', function (e) {
// const file = e.target.files[0];
// if (!file) {
// return;
// }
// const reader = new FileReader();
// // Set up a callback when the file is loaded
// reader.onload = e2 => detect(e2.target.result);
// reader.readAsDataURL(file);
// });
// // Detect objects in the image
// async function detect(img) {
// imageContainer.innerHTML = '';
// imageContainer.style.backgroundImage = `url(${img})`;
// status.textContent = 'Analysing...';
// const output = await detector(img, {
// threshold: 0.5,
// percentage: true,
// });
// status.textContent = '';
// output.forEach(renderBox);
// }
// // Render a bounding box and label on the image
// function renderBox({ box, label }) {
// const { xmax, xmin, ymax, ymin } = box;
// // Generate a random color for the box
// const color = '#' + Math.floor(Math.random() * 0xFFFFFF).toString(16).padStart(6, 0);
// // Draw the box
// const boxElement = document.createElement('div');
// boxElement.className = 'bounding-box';
// Object.assign(boxElement.style, {
// borderColor: color,
// left: 100 * xmin + '%',
// top: 100 * ymin + '%',
// width: 100 * (xmax - xmin) + '%',
// height: 100 * (ymax - ymin) + '%',
// })
// // Draw label
// const labelElement = document.createElement('span');
// labelElement.textContent = label;
// labelElement.className = 'bounding-box-label';
// labelElement.style.backgroundColor = color;
// boxElement.appendChild(labelElement);
// imageContainer.appendChild(boxElement);
// }
// function setup(){
// let canvas = createCanvas(200,200)
// canvas.position(300, 1000);
// background(200)
// textSize(20)
// textAlign(CENTER,CENTER)
// console.log('p5 loaded')
// }
// function draw(){
// //
// }