Sarah Ciston committed
Commit • 5b326aa
1 Parent(s): 07cb0df

rename sketch.js

Files changed:
- index.html  +4 -4
- index.js → sketch.js  +7 -7
index.html CHANGED

@@ -3,10 +3,10 @@
 
 <head>
   <meta charset="UTF-8" />
-  <link rel="stylesheet" href="style.css" />
+  <link rel="stylesheet" type="text/css" href="style.css" />
   <script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.9.4/p5.js"></script>
-
-  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+  <script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.9.4/addons/p5.sound.min.js"></script>
+  <!-- <meta name="viewport" content="width=device-width, initial-scale=1.0" /> -->
   <title>p5.js Critical AI Prompt Battle</title>
 </head>
 
@@ -25,7 +25,7 @@
   <!-- <label id="status">Loading model...</label> -->
   <!-- <input id="upload" type="file" accept="image/*" /> -->
 </main>
-  <script
+  <script type="module" src="sketch.js"></script>
 </body>
 
 </html>
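A note on the script change in this file: sketch.js is an ES module (it imports Transformers.js from a CDN and calls await pipeline(...) at the top level), and both of those features only work in module scripts, so the type="module" attribute is what lets the renamed file load at all. Below is a minimal sketch of the pattern the page now expects; the generation options and prompt are illustrative, the model is the lighter Xenova/gpt2 named in the sketch.js comment, and the full CDN import URL is truncated in the hunk header of the next diff, so the shortened path here is indicative only.

// Loaded via <script type="module" src="sketch.js"></script>
// Module scripts allow both the import below and top-level await.
import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers'; // CDN path continues as in sketch.js

// 'Xenova/gpt2' is the smaller model named in the sketch.js comment;
// prompt text and options here are illustrative, not from the commit.
let pipe = await pipeline('text-generation', 'Xenova/gpt2');
let out = await pipe('The future of prompt battles is', { max_new_tokens: 30 });
console.log(out[0].generated_text);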
index.js → sketch.js RENAMED

@@ -5,7 +5,7 @@ import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers
 // import { pipeline } from '@xenova/transformers';
 
 let pipe = await pipeline('text-generation', 'mistralai/Mistral-7B-Instruct-v0.2');
-// models('gpt2', 'mistralai/Mistral-7B-Instruct-v0.2', 'meta-llama/Meta-Llama-3-8B')
+// models('Xenova/gpt2', 'mistralai/Mistral-7B-Instruct-v0.2', 'meta-llama/Meta-Llama-3-8B')
 
 // Since we will download the model from the Hugging Face Hub, we can skip the local model check
 // env.allowLocalModels = false;
@@ -194,10 +194,10 @@ new p5(function(p5){
 
 }
 
-function makeInput(i){
-
-
-
-
+// function makeInput(i){
+//   i = p5.createInput("");
+//   i.position(0, 300); //append to last input and move buttons down
+//   i.size(200);
+//   i.elt.style.fontSize = "15px";
+// }
 });
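For reference, the makeInput helper that this commit leaves commented out follows the usual p5.js instance-mode DOM pattern: createInput returns a p5.Element whose position, size, and underlying .elt can be set directly. Here is a self-contained sketch of that pattern, reusing the coordinates and sizes from the commented code; the variable name, setup wrapper, and noCanvas call are illustrative additions, not part of the commit.

// Instance-mode p5 sketch using the same createInput pattern as the
// commented-out makeInput helper; coordinates and sizes match the diff.
new p5(function (p5) {
  let promptInput;

  p5.setup = function () {
    p5.noCanvas();                          // DOM-only sketch for this example
    promptInput = p5.createInput("");       // empty text field
    promptInput.position(0, 300);           // "append to last input and move buttons down"
    promptInput.size(200);
    promptInput.elt.style.fontSize = "15px";
  };
});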