Sarah Ciston committed · Commit 2b5391c · Parent(s): 33b753f

updated preprompt; undec var
sketch.js CHANGED
@@ -105,7 +105,7 @@ new p5(function (p5) {
     // TO-DO a model drop down list?
 
     // alt-text description
-    describe(`Pink and black text on a white background with form inputs and two buttons. The text describes a p5.js Critical AI Prompt Battle tool that lets you run several AI chat prompts at once and compare their results. Use it to explore what models 'know' about various concepts, communities, and cultures. In the largest form input you can write a prompt to submit. In smaller inputs, you can write variables that will be inserted into that prompt as variations of the prompt when it is run through the model. There is a submit button, a button to add more variations, and when the model is run it adds text at the bottom showing the output results.`)
+    p5.describe(`Pink and black text on a white background with form inputs and two buttons. The text describes a p5.js Critical AI Prompt Battle tool that lets you run several AI chat prompts at once and compare their results. Use it to explore what models 'know' about various concepts, communities, and cultures. In the largest form input you can write a prompt to submit. In smaller inputs, you can write variables that will be inserted into that prompt as variations of the prompt when it is run through the model. There is a submit button, a button to add more variations, and when the model is run it adds text at the bottom showing the output results.`)
   }
 
   function addField(){
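The describe() to p5.describe() change reflects p5.js instance mode: inside new p5(function (p5) { ... }) the library's functions are not globals but methods of the instance argument, so a bare describe() call is not defined there. A minimal sketch of the pattern, assuming the p5.js library is already loaded on the page (canvas size and description text here are placeholders, not taken from sketch.js):

  // Minimal instance-mode sketch: every p5 call goes through the instance parameter.
  new p5(function (p5) {
    p5.setup = function () {
      p5.createCanvas(400, 200);
      // describe() attaches a screen-reader description to the canvas;
      // in instance mode it must be called as p5.describe().
      p5.describe('Placeholder alt-text describing the example canvas.');
    };
  });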
@@ -135,10 +135,12 @@ new p5(function (p5) {
 
     // BLANKS = inputValues // get ready to feed array list into model
 
-    let PREPROMPT = `
+    let PREPROMPT = `In the sample sentence I provide, please fill in the [BLANK] with each word I in the array ${BLANKSVALUES}, replace any [FILL] with an appropriate word of your choice, and respond with an array of sentences. Here is the SAMPLE SENTENCE: `
 
     // we pass PROMPT and PREPROMPT to the model function, don't need to pass BLANKSVALUES bc it's passed into the PREPROMPT already here
 
+    // Please return an array of sentences based on the sample sentence to follow. In each sentence,
+
     let modelResult = await runModel(PREPROMPT, PROMPT)
 
     await displayModel(modelResult)
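A note on the new PREPROMPT: interpolating an array with ${BLANKSVALUES} inside a template literal uses the array's default toString(), so the values arrive as a comma-separated list inside the instruction text, which is why the comment above says BLANKSVALUES does not need to be passed to runModel separately. A small stand-alone illustration, with sample values assumed for this example rather than taken from the sketch's form inputs:

  // Hypothetical values standing in for the sketch's form inputs.
  const BLANKSVALUES = ['teacher', 'nurse', 'engineer'];
  const PROMPT = 'The [BLANK] went to work and felt [FILL] about the day.';

  // The array stringifies to "teacher,nurse,engineer" inside the template literal.
  const PREPROMPT = `In the sample sentence I provide, please fill in the [BLANK] with each word in the array ${BLANKSVALUES}, replace any [FILL] with an appropriate word of your choice, and respond with an array of sentences. Here is the SAMPLE SENTENCE: `;

  console.log(PREPROMPT + PROMPT);
  // -> "... each word in the array teacher,nurse,engineer, ... SAMPLE SENTENCE: The [BLANK] went to work and felt [FILL] about the day."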
@@ -164,14 +166,14 @@ async function runModel(PREPROMPT, PROMPT){
   // 'openai-community/gpt2'
   // 'Xenova/gpt-3.5-turbo'
 
-  let out = await pipe((PREPROMPT, PROMPT), {
-    max_tokens: 250,
-    return_full_text: false,
-    repetition_penalty: 1.5,
-    num_return_sequences: 1 //must be 1 for greedy search
-  })
+  // let out = await pipe((PREPROMPT, PROMPT), {
+  //   max_tokens: 250,
+  //   return_full_text: false,
+  //   repetition_penalty: 1.5,
+  //   num_return_sequences: 1 //must be 1 for greedy search
+  // })
 
-
+  out = await pipe((PREPROMPT + PROMPT))
 
   console.log(out)
 
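The final hunk's switch from pipe((PREPROMPT, PROMPT), {...}) to pipe((PREPROMPT + PROMPT)) is more than style: the parenthesized pair uses the JavaScript comma operator, which evaluates both expressions but yields only the last one, so the old call sent PROMPT alone and silently dropped the preprompt. A quick illustration with plain strings, no model call involved:

  const PREPROMPT = 'Fill in the [BLANK] as instructed. ';
  const PROMPT = 'The [BLANK] deserves a raise.';

  console.log((PREPROMPT, PROMPT)); // "The [BLANK] deserves a raise."  (comma operator discards PREPROMPT)
  console.log(PREPROMPT + PROMPT);  // "Fill in the [BLANK] as instructed. The [BLANK] deserves a raise."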