Changed files:
- .DS_Store +0 -0
- patches/assets/GrayCat.webp +0 -0
- patches/assets/a16z.png +0 -0
- patches/assets/background.webp +0 -0
- patches/assets/close.svg +18 -0
- patches/assets/convex-bg.webp +0 -0
- patches/assets/convex.svg +19 -0
- patches/assets/help.svg +20 -0
- patches/assets/interact.svg +11 -0
- patches/assets/star.svg +10 -0
- patches/assets/ui/box.svg +15 -0
- patches/assets/ui/bubble-left.svg +27 -0
- patches/assets/ui/bubble-right.svg +20 -0
- patches/assets/ui/button.svg +18 -0
- patches/assets/ui/button_pressed.svg +18 -0
- patches/assets/ui/chats.svg +17 -0
- patches/assets/ui/desc.svg +50 -0
- patches/assets/ui/frame.svg +12 -0
- patches/assets/ui/jewel_box.svg +34 -0
- patches/assets/volume.svg +17 -0
- patches/characters.ts +1 -1
- patches/convex/aiTown/game.ts +8 -2
- patches/convex/aiTown/gameCycle.ts +24 -12
- patches/convex/aiTown/player.ts +1 -1
- patches/convex/constants.ts +12 -10
- patches/convex/init.ts +2 -2
- patches/convex/util/llm.ts +133 -128
- patches/convex/world.ts +17 -31
- patches/data/characters.ts +41 -33
- patches/public/assets/GrayCat.webp +0 -0
- patches/src/App.tsx +11 -15
- patches/src/components/Character.tsx +3 -3
- patches/src/components/Game.tsx +1 -1
- patches/src/components/PixiGame.tsx +2 -0
- patches/src/components/Player.tsx +7 -4
- patches/src/components/buttons/InteractButton.tsx +2 -1
.DS_Store
ADDED
Binary file (6.15 kB).
patches/assets/GrayCat.webp
ADDED
patches/assets/a16z.png
ADDED
patches/assets/background.webp
ADDED
patches/assets/close.svg
ADDED
patches/assets/convex-bg.webp
ADDED
patches/assets/convex.svg
ADDED
patches/assets/help.svg
ADDED
patches/assets/interact.svg
ADDED
patches/assets/star.svg
ADDED
patches/assets/ui/box.svg
ADDED
patches/assets/ui/bubble-left.svg
ADDED
patches/assets/ui/bubble-right.svg
ADDED
patches/assets/ui/button.svg
ADDED
patches/assets/ui/button_pressed.svg
ADDED
patches/assets/ui/chats.svg
ADDED
patches/assets/ui/desc.svg
ADDED
patches/assets/ui/frame.svg
ADDED
patches/assets/ui/jewel_box.svg
ADDED
patches/assets/volume.svg
ADDED
patches/characters.ts
CHANGED
@@ -217,7 +217,7 @@ export const characters = [
   },
   {
     name: 'c1',
-    textureUrl: '/
+    textureUrl: '/assets/GrayCat.png',
     spritesheetData: c1SpritesheetData,
     speed: 0.19,
   },
patches/convex/aiTown/game.ts
CHANGED
@@ -25,6 +25,7 @@ import { internal } from '../_generated/api';
 import { HistoricalObject } from '../engine/historicalObject';
 import { AgentDescription, serializedAgentDescription } from './agentDescription';
 import { parseMap, serializeMap } from '../util/object';
+import { LOBBY_SIZE } from '../constants';

 type WerewolfLookupTable = {
   [key: number]: number;
@@ -229,8 +230,8 @@ export class Game extends AbstractGame {
     // Check for end game conditions
     // are there any humans?
     // we check for endgame if there's at least 1 human player
-    const humans = [...this.world.
-    if (humans.length > 0) {
+    const humans = [...this.world.playersInit.values()].filter(player => player.human)
+    if (this.world.gameCycle.cycleState !== 'LobbyState' && humans.length > 0) {
       // all 'werewolf' are dead -> villagers win
       const werewolves = [...this.world.players.values()].filter(player =>
         player.playerType(this) === 'werewolf'
@@ -253,6 +254,11 @@ export class Game extends AbstractGame {
         this.world.winner = 'werewolves'
       }
     }
+
+    // Quit LobbyState to start the game once we have at least 3 players
+    if (this.world.gameCycle.cycleState === 'LobbyState' && humans.length >= LOBBY_SIZE) {
+      this.world.gameCycle.startGame(this)
+    }

     // debug
     // console.log(`we have ${ villagers.length } villagers`)
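For readers skimming the patch, the lobby gate added above boils down to a small predicate. The sketch below is an illustration only: the CycleState union and the LOBBY_SIZE value are assumptions taken from this patch set, not from the repo's actual type definitions. Note that the comment in the patch still says "at least 3 players" while LOBBY_SIZE is set to 1 for debugging in constants.ts below.

// Illustrative sketch of the lobby-start check added in game.ts above.
// CycleState members and LOBBY_SIZE are assumptions based on this patch.
type CycleState = 'LobbyState' | 'Day' | 'Night' | 'WerewolfVoting' | 'PlayerKillVoting' | 'EndGame';

const LOBBY_SIZE = 1; // debug value from patches/convex/constants.ts below

function shouldStartGame(cycleState: CycleState, humanCount: number): boolean {
  // Only leave the lobby once enough human players have joined.
  return cycleState === 'LobbyState' && humanCount >= LOBBY_SIZE;
}

console.log(shouldStartGame('LobbyState', 1)); // true with the debug lobby size
console.log(shouldStartGame('Day', 5));        // false: the game is already running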
patches/convex/aiTown/gameCycle.ts
CHANGED
@@ -115,19 +115,19 @@ const onStateChange = (prevState: CycleState, newState: CycleState, game: Game,
     })
   };
   if (prevState === 'PlayerKillVoting') {
-    const werewolves = [...game.world.players.values()].filter((were) => {
-
-    })
-    if (werewolves.length != 0) {
-
-
-
-
-
-    }
-    } else {
-      console.log('no werewolves, nobody was killed')
+    // const werewolves = [...game.world.players.values()].filter((were) => {
+    //   game.playerDescriptions.get(were.id)?.type === 'werewolf'
+    // })
+    // if (werewolves.length != 0) {
+    const mostVotedPlayer = processVotes(game.world.gameVotes, [...game.world.players.values()])[0];
+    const playerToKill = game.world.players.get(mostVotedPlayer.playerId);
+    console.log(`killing: ${playerToKill?.id}, with ${game.world.gameVotes.length} votes`)
+    if (playerToKill) {
+      playerToKill.kill(game, now);
     }
+    // } else {
+    //   console.log('no werewolves, nobody was killed')
+    // }
     game.world.gameVotes = [];
   }
   if (prevState === 'WerewolfVoting') {
@@ -151,6 +151,10 @@ const onStateChange = (prevState: CycleState, newState: CycleState, game: Game,
     pushToGist(averageCorrectVotes);

   }
+
+  if (prevState == 'LobbyState') {
+    game.assignRoles()
+  }
 };

 export class GameCycle {
@@ -165,6 +169,14 @@ export class GameCycle {
     this.cycleIndex = cycleIndex;
   }

+  startGame(game: Game) {
+    this.currentTime = 0;
+    onStateChange(this.cycleState, 'Day', game, 0);
+    this.cycleState = 'Day';
+    this.cycleIndex = 0;
+    console.log('Game started')
+  }
+
   endgame(game: Game) {
     this.currentTime = 0;
     onStateChange(this.cycleState, 'EndGame', game, 0);
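The PlayerKillVoting branch above relies on processVotes(...) returning candidates ordered by vote count, with the most voted player first. That helper is not shown in this patch, so the following is a hypothetical sketch of a tally with the same calling shape; the field names are assumptions, not the repo's actual implementation.

// Hypothetical vote-tally sketch mirroring how processVotes is used above.
type GameVote = { playerId: string; votedPlayerId: string };
type PlayerLike = { id: string };

function processVotesSketch(votes: GameVote[], players: PlayerLike[]): { playerId: string; votes: number }[] {
  const counts = new Map<string, number>();
  for (const p of players) counts.set(p.id, 0);
  for (const v of votes) {
    counts.set(v.votedPlayerId, (counts.get(v.votedPlayerId) ?? 0) + 1);
  }
  // Most voted first, matching the [0] access in the PlayerKillVoting branch.
  return [...counts.entries()]
    .map(([playerId, n]) => ({ playerId, votes: n }))
    .sort((a, b) => b.votes - a.votes);
}

const mostVoted = processVotesSketch(
  [{ playerId: 'p1', votedPlayerId: 'p2' }, { playerId: 'p3', votedPlayerId: 'p2' }],
  [{ id: 'p1' }, { id: 'p2' }, { id: 'p3' }],
)[0];
console.log(mostVoted); // { playerId: 'p2', votes: 2 }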
patches/convex/aiTown/player.ts
CHANGED
@@ -312,7 +312,7 @@ export const playerInputs = {
     handler: (game, now, args) => {
       Player.join(game, now, args.name, args.character, args.description, args.type ,args.tokenIdentifier);
       // Temporary role assignment for testing
-      game.assignRoles()
+      // game.assignRoles()
       return null;
     },
   }),
patches/convex/constants.ts
CHANGED
@@ -71,21 +71,23 @@ export const ACTIVITIES = [
 ];

 export const ENGINE_ACTION_DURATION = 30000;
-export const DAY_DURATION = 60000;
-export const NIGHT_DURATION = 60000;
-export const WWOLF_VOTE_DURATION = 30000;
-export const PLAYER_KILL_VOTE_DURATION = 30000;
-export const LLM_VOTE_DURATION = 60000;
+// export const DAY_DURATION = 60000;
+// export const NIGHT_DURATION = 60000;
+// export const WWOLF_VOTE_DURATION = 30000;
+// export const PLAYER_KILL_VOTE_DURATION = 30000;
+// export const LLM_VOTE_DURATION = 60000;

 // Debugging
-
-
-
-
-
+export const DAY_DURATION = 5000;
+export const NIGHT_DURATION = 5000;
+export const WWOLF_VOTE_DURATION = 1000;
+export const PLAYER_KILL_VOTE_DURATION = 1000;
+export const LLM_VOTE_DURATION = 1000;

 export const MAX_NPC = 8;

+export const LOBBY_SIZE = 1;
+
 // Bound the number of pathfinding searches we do per game step.
 export const MAX_PATHFINDS_PER_STEP = 16;
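The debug block swaps the phase durations for much shorter ones. A quick arithmetic sketch of the difference, assuming one pass through Day, Night and the three voting phases (the real phase ordering lives in gameCycle.ts and may repeat phases):

// Rough comparison of one cycle under production vs. debug durations (values from this diff).
const production = { day: 60000, night: 60000, wwolfVote: 30000, playerKillVote: 30000, llmVote: 60000 };
const debug = { day: 5000, night: 5000, wwolfVote: 1000, playerKillVote: 1000, llmVote: 1000 };

const total = (d: typeof production) =>
  d.day + d.night + d.wwolfVote + d.playerKillVote + d.llmVote;

console.log(total(production)); // 240000 ms = 4 minutes per pass
console.log(total(debug));      // 13000 ms = 13 seconds per pass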
patches/convex/init.ts
CHANGED
@@ -63,8 +63,8 @@ async function getOrCreateDefaultWorld(ctx: MutationCtx) {
       // initialize game cycle counter
       gameCycle: {
         currentTime: 0,
-        cycleState: '
-        cycleIndex:
+        cycleState: 'LobbyState',
+        cycleIndex: -1,
       },
       gameVotes: [],
       llmVotes: []
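With this change the world is created directly in the lobby. A hedged sketch of the initial gameCycle value as a plain type; the real Convex validators live in the repo's schema and may name or constrain these fields differently.

// Hedged sketch only: field names follow this patch, not the repo's schema validators.
type SerializedGameCycle = {
  currentTime: number;
  cycleState: string; // 'LobbyState' until enough humans join (see game.ts above)
  cycleIndex: number; // -1 marks a game that has not started yet
};

const initialGameCycle: SerializedGameCycle = {
  currentTime: 0,
  cycleState: 'LobbyState',
  cycleIndex: -1,
};

console.log(initialGameCycle);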
patches/convex/util/llm.ts
CHANGED
@@ -1,22 +1,14 @@
-
+// That's right! No imports and no dependencies 🤯

 export const LLM_CONFIG = {
-  /* Hugginface config: */
-  ollama: false,
-  huggingface: true,
-  url: "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct",
-  chatModel: "meta-llama/Meta-Llama-3-8B-Instruct",
-  embeddingModel:
-    "https://api-inference.huggingface.co/models/mixedbread-ai/mxbai-embed-large-v1",
-  embeddingDimension: 1024,
-
   /* Ollama (local) config:
   */
-
-
-
-
-
+  ollama: true,
+  url: 'http://127.0.0.1:11434',
+  chatModel: 'llama3' as const,
+  embeddingModel: 'mxbai-embed-large',
+  embeddingDimension: 1024,
+  stopWords: ['<|eot_id|>'],
   // embeddingModel: 'llama3',
   // embeddingDimension: 4096,

@@ -26,6 +18,7 @@ export const LLM_CONFIG = {
   chatModel: 'meta-llama/Llama-3-8b-chat-hf',
   embeddingModel: 'togethercomputer/m2-bert-80M-8k-retrieval',
   embeddingDimension: 768,
+  stopWords: ['<|eot_id|>'],
   */

   /* OpenAI config:
@@ -44,10 +37,10 @@ function apiUrl(path: string) {
     process.env.OLLAMA_HOST ??
     process.env.OPENAI_API_BASE ??
     LLM_CONFIG.url;
-  if (host.endsWith(
+  if (host.endsWith('/') && path.startsWith('/')) {
     return host + path.slice(1);
-  } else if (!host.endsWith(
-    return host +
+  } else if (!host.endsWith('/') && !path.startsWith('/')) {
+    return host + '/' + path;
   } else {
     return host + path;
   }
@@ -60,67 +53,72 @@ function apiKey() {
 const AuthHeaders = (): Record<string, string> =>
   apiKey()
     ? {
-        Authorization:
+        Authorization: 'Bearer ' + apiKey(),
       }
     : {};

 // Overload for non-streaming
 export async function chatCompletion(
-  body: Omit<CreateChatCompletionRequest,
-    model?: CreateChatCompletionRequest[
+  body: Omit<CreateChatCompletionRequest, 'model'> & {
+    model?: CreateChatCompletionRequest['model'];
   } & {
     stream?: false | null | undefined;
-  }
+  },
 ): Promise<{ content: string; retries: number; ms: number }>;
 // Overload for streaming
 export async function chatCompletion(
-  body: Omit<CreateChatCompletionRequest,
-    model?: CreateChatCompletionRequest[
+  body: Omit<CreateChatCompletionRequest, 'model'> & {
+    model?: CreateChatCompletionRequest['model'];
   } & {
     stream?: true;
-  }
+  },
 ): Promise<{ content: ChatCompletionContent; retries: number; ms: number }>;
 export async function chatCompletion(
-  body: Omit<CreateChatCompletionRequest,
-    model?: CreateChatCompletionRequest[
-  }
+  body: Omit<CreateChatCompletionRequest, 'model'> & {
+    model?: CreateChatCompletionRequest['model'];
+  },
 ) {
   assertApiKey();
   // OLLAMA_MODEL is legacy
   body.model =
-    body.model ??
-
-
-
-  const stopWords = body.stop
-    ? typeof body.stop === "string"
-      ? [body.stop]
-      : body.stop
-    : [];
-  if (LLM_CONFIG.ollama || LLM_CONFIG.huggingface) stopWords.push("<|eot_id|>");
-
+    body.model ?? process.env.LLM_MODEL ?? process.env.OLLAMA_MODEL ?? LLM_CONFIG.chatModel;
+  const stopWords = body.stop ? (typeof body.stop === 'string' ? [body.stop] : body.stop) : [];
+  if (LLM_CONFIG.stopWords) stopWords.push(...LLM_CONFIG.stopWords);
+  console.log(body);
   const {
     result: content,
     retries,
     ms,
   } = await retryWithBackoff(async () => {
-    const
-
+    const result = await fetch(apiUrl('/v1/chat/completions'), {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        ...AuthHeaders(),
+      },
+
+      body: JSON.stringify(body),
+    });
+    if (!result.ok) {
+      const error = await result.text();
+      console.error({ error });
+      if (result.status === 404 && LLM_CONFIG.ollama) {
+        await tryPullOllama(body.model!, error);
+      }
+      throw {
+        retry: result.status === 429 || result.status >= 500,
+        error: new Error(`Chat completion failed with code ${result.status}: ${error}`),
+      };
+    }
     if (body.stream) {
-
-      ...body,
-      });
-      return new ChatCompletionContent(completion, stopWords);
+      return new ChatCompletionContent(result.body!, stopWords);
     } else {
-      const
-
-      });
-      const content = completion.choices[0].message?.content;
+      const json = (await result.json()) as CreateChatCompletionResponse;
+      const content = json.choices[0].message?.content;
       if (content === undefined) {
-        throw new Error(
-          "Unexpected result from OpenAI: " + JSON.stringify(completion)
-        );
+        throw new Error('Unexpected result from OpenAI: ' + JSON.stringify(json));
       }
+      console.log(content);
       return content;
     }
   });
@@ -133,20 +131,17 @@ export async function chatCompletion(
 }

 export async function tryPullOllama(model: string, error: string) {
-  if (error.includes(
-    console.error(
-    const pullResp = await fetch(apiUrl(
-      method:
+  if (error.includes('try pulling')) {
+    console.error('Embedding model not found, pulling from Ollama');
+    const pullResp = await fetch(apiUrl('/api/pull'), {
+      method: 'POST',
       headers: {
-
+        'Content-Type': 'application/json',
       },
       body: JSON.stringify({ name: model }),
     });
-    console.log(
-    throw {
-      retry: true,
-      error: `Dynamically pulled model. Original error: ${error}`,
-    };
+    console.log('Pull response', await pullResp.text());
+    throw { retry: true, error: `Dynamically pulled model. Original error: ${error}` };
   }
 }

@@ -155,61 +150,39 @@ export async function fetchEmbeddingBatch(texts: string[]) {
     return {
       ollama: true as const,
       embeddings: await Promise.all(
-        texts.map(async (t) => (await ollamaFetchEmbedding(t)).embedding)
+        texts.map(async (t) => (await ollamaFetchEmbedding(t)).embedding),
       ),
     };
   }
   assertApiKey();
-
-  if (LLM_CONFIG.huggingface) {
-    const result = await fetch(LLM_CONFIG.embeddingModel, {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-        "X-Wait-For-Model": "true",
-        ...AuthHeaders(),
-      },
-      body: JSON.stringify({
-        inputs: texts.map((text) => text.replace(/\n/g, " ")),
-      }),
-    });
-    const embeddings = await result.json();
-    return {
-      ollama: true as const,
-      embeddings: embeddings,
-    };
-  }
-
   const {
     result: json,
     retries,
     ms,
   } = await retryWithBackoff(async () => {
-    const result = await fetch(apiUrl(
-      method:
+    const result = await fetch(apiUrl('/v1/embeddings'), {
+      method: 'POST',
       headers: {
-
+        'Content-Type': 'application/json',
         ...AuthHeaders(),
       },

       body: JSON.stringify({
         model: LLM_CONFIG.embeddingModel,
-        input: texts.map((text) => text.replace(/\n/g,
+        input: texts.map((text) => text.replace(/\n/g, ' ')),
       }),
     });
     if (!result.ok) {
       throw {
         retry: result.status === 429 || result.status >= 500,
-        error: new Error(
-          `Embedding failed with code ${result.status}: ${await result.text()}`
-        ),
+        error: new Error(`Embedding failed with code ${result.status}: ${await result.text()}`),
       };
     }
     return (await result.json()) as CreateEmbeddingResponse;
   });
   if (json.data.length !== texts.length) {
     console.error(json);
-    throw new Error(
+    throw new Error('Unexpected number of embeddings');
   }
   const allembeddings = json.data;
   allembeddings.sort((a, b) => a.index - b.index);
@@ -230,10 +203,10 @@ export async function fetchEmbedding(text: string) {
 export async function fetchModeration(content: string) {
   assertApiKey();
   const { result: flagged } = await retryWithBackoff(async () => {
-    const result = await fetch(apiUrl(
-      method:
+    const result = await fetch(apiUrl('/v1/moderations'), {
+      method: 'POST',
       headers: {
-
+        'Content-Type': 'application/json',
         ...AuthHeaders(),
       },

@@ -244,9 +217,7 @@ export async function fetchModeration(content: string) {
     if (!result.ok) {
       throw {
         retry: result.status === 429 || result.status >= 500,
-        error: new Error(
-          `Embedding failed with code ${result.status}: ${await result.text()}`
-        ),
+        error: new Error(`Embedding failed with code ${result.status}: ${await result.text()}`),
       };
     }
     return (await result.json()) as { results: { flagged: boolean }[] };
@@ -257,9 +228,9 @@ export async function fetchModeration(content: string) {
 export function assertApiKey() {
   if (!LLM_CONFIG.ollama && !apiKey()) {
     throw new Error(
-
-      (LLM_CONFIG.ollama ?
-        " convex env set LLM_API_KEY 'your-key'"
+      '\n Missing LLM_API_KEY in environment variables.\n\n' +
+        (LLM_CONFIG.ollama ? 'just' : 'npx') +
+        " convex env set LLM_API_KEY 'your-key'",
     );
   }
 }
@@ -270,7 +241,7 @@ const RETRY_JITTER = 100; // In ms
 type RetryError = { retry: boolean; error: any };

 export async function retryWithBackoff<T>(
-  fn: () => Promise<T
+  fn: () => Promise<T>,
 ): Promise<{ retries: number; result: T; ms: number }> {
   let i = 0;
   for (; i <= RETRY_BACKOFF.length; i++) {
@@ -284,13 +255,11 @@ export async function retryWithBackoff<T>(
     if (i < RETRY_BACKOFF.length) {
       if (retryError.retry) {
         console.log(
-          `Attempt ${i + 1} failed, waiting ${
-
-          }ms to retry...`,
-          Date.now()
+          `Attempt ${i + 1} failed, waiting ${RETRY_BACKOFF[i]}ms to retry...`,
+          Date.now(),
         );
         await new Promise((resolve) =>
-          setTimeout(resolve, RETRY_BACKOFF[i] + RETRY_JITTER * Math.random())
+          setTimeout(resolve, RETRY_BACKOFF[i] + RETRY_JITTER * Math.random()),
         );
         continue;
       }
@@ -299,7 +268,7 @@ export async function retryWithBackoff<T>(
     else throw e;
   }
 }
-  throw new Error(
+  throw new Error('Unreachable');
 }

 // Lifted from openai's package
@@ -314,7 +283,7 @@ export interface LLMMessage {
    * The role of the messages author. One of `system`, `user`, `assistant`, or
    * `function`.
    */
-  role:
+  role: 'system' | 'user' | 'assistant' | 'function';

   /**
    * The name of the author of this message. `name` is required if role is
@@ -349,7 +318,7 @@ interface CreateChatCompletionResponse {
   choices: {
     index?: number;
     message?: {
-      role:
+      role: 'system' | 'user' | 'assistant';
       content: string;
     };
     finish_reason?: string;
@@ -478,7 +447,7 @@ export interface CreateChatCompletionRequest {
   user?: string;
   tools?: {
     // The type of the tool. Currently, only function is supported.
-    type:
+    type: 'function';
     function: {
       /**
        * The name of the function to be called. Must be a-z, A-Z, 0-9, or
@@ -514,13 +483,13 @@ export interface CreateChatCompletionRequest {
    * `auto` is the default if functions are present.
    */
   tool_choice?:
-
-
+    | 'none' // none means the model will not call a function and instead generates a message.
+    | 'auto' // auto means the model can pick between generating a message or calling a function.
     // Specifies a tool the model should use. Use to force the model to call
     // a specific function.
     | {
         // The type of the tool. Currently, only function is supported.
-        type:
+        type: 'function';
         function: { name: string };
       };
   // Replaced by "tools"
@@ -569,7 +538,7 @@ export interface CreateChatCompletionRequest {
    * finish_reason="length", which indicates the generation exceeded max_tokens
    * or the conversation exceeded the max context length.
    */
-  response_format?: { type:
+  response_format?: { type: 'text' | 'json_object' };
 }

 // Checks whether a suffix of s1 is a prefix of s2. For example,
@@ -587,27 +556,35 @@ const suffixOverlapsPrefix = (s1: string, s2: string) => {
 };

 export class ChatCompletionContent {
-  private readonly
+  private readonly body: ReadableStream<Uint8Array>;
   private readonly stopWords: string[];

-  constructor(
-
-    stopWords: string[]
-  ) {
-    this.completion = completion;
+  constructor(body: ReadableStream<Uint8Array>, stopWords: string[]) {
+    this.body = body;
     this.stopWords = stopWords;
   }

   async *readInner() {
-    for await (const
-
+    for await (const data of this.splitStream(this.body)) {
+      if (data.startsWith('data: ')) {
+        try {
+          const json = JSON.parse(data.substring('data: '.length)) as {
+            choices: { delta: { content?: string } }[];
+          };
+          if (json.choices[0].delta.content) {
+            yield json.choices[0].delta.content;
+          }
+        } catch (e) {
+          // e.g. the last chunk is [DONE] which is not valid JSON.
+        }
+      }
     }
   }

   // stop words in OpenAI api don't always work.
   // So we have to truncate on our side.
   async *read() {
-    let lastFragment =
+    let lastFragment = '';
     for await (const data of this.readInner()) {
       lastFragment += data;
       let hasOverlap = false;
@@ -623,26 +600,54 @@ export class ChatCompletionContent {
       }
       if (hasOverlap) continue;
       yield lastFragment;
-      lastFragment =
+      lastFragment = '';
     }
     yield lastFragment;
   }

   async readAll() {
-    let allContent =
+    let allContent = '';
     for await (const chunk of this.read()) {
       allContent += chunk;
     }
     return allContent;
   }
+
+  async *splitStream(stream: ReadableStream<Uint8Array>) {
+    const reader = stream.getReader();
+    let lastFragment = '';
+    try {
+      while (true) {
+        const { value, done } = await reader.read();
+        if (done) {
+          // Flush the last fragment now that we're done
+          if (lastFragment !== '') {
+            yield lastFragment;
+          }
+          break;
+        }
+        const data = new TextDecoder().decode(value);
+        lastFragment += data;
+        const parts = lastFragment.split('\n\n');
+        // Yield all except for the last part
+        for (let i = 0; i < parts.length - 1; i += 1) {
+          yield parts[i];
+        }
+        // Save the last part as the new last fragment
+        lastFragment = parts[parts.length - 1];
+      }
+    } finally {
+      reader.releaseLock();
+    }
+  }
 }

 export async function ollamaFetchEmbedding(text: string) {
   const { result } = await retryWithBackoff(async () => {
-    const resp = await fetch(apiUrl(
-      method:
+    const resp = await fetch(apiUrl('/api/embeddings'), {
+      method: 'POST',
       headers: {
-
+        'Content-Type': 'application/json',
       },
       body: JSON.stringify({ model: LLM_CONFIG.embeddingModel, prompt: text }),
     });
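The rewritten helper talks to any OpenAI-compatible /v1/chat/completions endpoint, defaulting to a local Ollama server. Below is a hedged usage sketch, assuming the exports shown in this diff and an Ollama instance on 127.0.0.1:11434; the message fields follow the OpenAI request shape defined in the same file.

// Hedged usage sketch of the rewritten chatCompletion; not part of the patch itself.
import { chatCompletion } from './llm';

async function demo() {
  // Non-streaming overload: resolves to the full assistant message.
  const { content, retries, ms } = await chatCompletion({
    messages: [{ role: 'user', content: 'Say hello in one short sentence.' }],
    max_tokens: 64,
  });
  console.log(content, { retries, ms });

  // Streaming overload: ChatCompletionContent.read() yields text chunks that
  // readInner()/splitStream() parse out of the server-sent-event stream.
  const { content: stream } = await chatCompletion({
    messages: [{ role: 'user', content: 'Count to three.' }],
    stream: true,
  });
  for await (const chunk of stream.read()) {
    console.log(chunk);
  }
}

demo().catch(console.error);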
patches/convex/world.ts
CHANGED
@@ -1,9 +1,10 @@
 import { ConvexError, v } from 'convex/values';
 import { internalMutation, mutation, query } from './_generated/server';
 import { characters } from '../data/characters';
-import { insertInput } from './aiTown/insertInput';
 import { Descriptions } from '../data/characters';
+import { insertInput } from './aiTown/insertInput';
 import {
+  DEFAULT_NAME,
   ENGINE_ACTION_DURATION,
   IDLE_WORLD_TIMEOUT,
   WORLD_HEARTBEAT_INTERVAL,
@@ -106,7 +107,7 @@ export const userStatus = query({
     if (!oauthToken) {
       return null;
     }
-
+
     return oauthToken;
   },
 });
@@ -129,6 +130,7 @@ export const joinWorld = mutation({
     // const name =
     //   identity.givenName || identity.nickname || (identity.email && identity.email.split('@')[0]);
     const name = oauthToken;
+
     // if (!name) {
     //   throw new ConvexError(`Missing name on ${JSON.stringify(identity)}`);
     // }
@@ -136,37 +138,19 @@ export const joinWorld = mutation({
     if (!world) {
       throw new ConvexError(`Invalid world ID: ${args.worldId}`);
     }
+    // Select a random character description
+    const randomCharacter = Descriptions[Math.floor(Math.random() * Descriptions.length)];

-
-
-
-
-
-
-
-
-
-        return description.name
-      }
-    })
-    const availableDescriptions = Descriptions.filter(
-      description => !namesInGame.includes(description.name)
-    );
-
-    const randomCharacter = availableDescriptions[Math.floor(Math.random() * availableDescriptions.length)];
+    return await insertInput(ctx, world._id, 'join', {
+      name: randomCharacter.name,
+      character: randomCharacter.character,
+      description: randomCharacter.identity,
+      tokenIdentifier: oauthToken,
+      role: "villager",
+    });
+  },
+});

-    // const { tokenIdentifier } = identity;
-    return await insertInput(ctx, world._id, 'join', {
-      name: randomCharacter.name,
-      character: randomCharacter.character,
-      description: randomCharacter.identity,
-      // description: `${identity.givenName} is a human player`,
-      tokenIdentifier: oauthToken, // TODO: change for multiplayer to oauth
-      // By default everybody is a villager
-      type: 'villager',
-    });
-  },
-});

 export const leaveWorld = mutation({
   args: {
@@ -176,6 +160,8 @@ export const leaveWorld = mutation({
   handler: async (ctx, args) => {
     const { worldId, oauthToken } = args;

+
+    console.log('OAuth Name:', oauthToken);
     if (!oauthToken) {
       throw new ConvexError(`Not logged in`);
     }
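joinWorld now picks a random entry from Descriptions on the server, so the client only supplies the world and the OAuth token. A hedged client-side sketch of calling it with Convex's React hooks follows; the argument names are inferred from this patch and from InteractButton.tsx below, not confirmed against the repo.

// Hedged sketch of a client-side hook calling the joinWorld mutation changed above.
import { useMutation } from 'convex/react';
import { api } from '../convex/_generated/api';
import { Id } from '../convex/_generated/dataModel';

export function useJoinWorld(worldId: Id<'worlds'>, oauthToken: string | undefined) {
  const join = useMutation(api.world.joinWorld);
  return async () => {
    if (!oauthToken) return; // joinWorld rejects anonymous callers
    // The server picks a random character Description and joins the player
    // as a villager; the client passes only the world and the token.
    await join({ worldId, oauthToken });
  };
}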
patches/data/characters.ts
CHANGED
@@ -6,17 +6,18 @@ import { data as f5SpritesheetData } from './spritesheets/f5';
 import { data as f6SpritesheetData } from './spritesheets/f6';
 import { data as f7SpritesheetData } from './spritesheets/f7';
 import { data as f8SpritesheetData } from './spritesheets/f8';
+import { data as c1SpritesheetData } from './spritesheets/c1';

 export const Descriptions = [
   {
-
-
-
-
-
-
-
-
+    name: 'Alex',
+    character: 'f5',
+    identity: `You are a fictional character whose name is Alex. You enjoy painting,
+      programming and reading sci-fi books. You are currently talking to a human who
+      is very interested to get to know you. You are kind but can be sarcastic. You
+      dislike repetitive questions. You get SUPER excited about books.`,
+    plan: 'You want to find love.',
+  },
   {
     name: 'Lucky',
     character: 'f1',
@@ -45,14 +46,14 @@ export const Descriptions = [
       and not afraid to use her charm. she's a sociopath who has no empathy. but hides it well.`,
     plan: 'You want to take advantage of others as much as possible.',
   },
-
-
-
-
-
-
-
-
+  {
+    name: 'Kurt',
+    character: 'f2',
+    identity: `Kurt knows about everything, including science and
+      computers and politics and history and biology. He loves talking about
+      everything, always injecting fun facts about the topic of discussion.`,
+    plan: 'You want to spread knowledge.',
+  },
   {
     name: 'Alice',
     character: 'f3',
@@ -168,53 +169,60 @@ export const Descriptions = [
 export const characters = [
   {
     name: 'f1',
-    textureUrl: '/
+    textureUrl: '/assets/32x32folk.png',
     spritesheetData: f1SpritesheetData,
-    speed: 0.
+    speed: 0.19,
   },
   {
     name: 'f2',
-    textureUrl: '/
+    textureUrl: '/assets/32x32folk.png',
     spritesheetData: f2SpritesheetData,
-    speed: 0.
+    speed: 0.19,
   },
   {
     name: 'f3',
-    textureUrl: '/
+    textureUrl: '/assets/32x32folk.png',
     spritesheetData: f3SpritesheetData,
-    speed: 0.
+    speed: 0.19,
   },
   {
     name: 'f4',
-    textureUrl: '/
+    textureUrl: '/assets/32x32folk.png',
     spritesheetData: f4SpritesheetData,
-    speed: 0.
+    speed: 0.19,
   },
   {
     name: 'f5',
-    textureUrl: '/
+    textureUrl: '/assets/32x32folk.png',
     spritesheetData: f5SpritesheetData,
-    speed: 0.
+    speed: 0.19,
   },
   {
     name: 'f6',
-    textureUrl: '/
+    textureUrl: '/assets/32x32folk.png',
     spritesheetData: f6SpritesheetData,
-    speed: 0.
+    speed: 0.19,
   },
   {
     name: 'f7',
-    textureUrl: '/
+    textureUrl: '/assets/32x32folk.png',
     spritesheetData: f7SpritesheetData,
-    speed: 0.
+    speed: 0.19,
   },
   {
     name: 'f8',
-    textureUrl: '/
+    textureUrl: '/assets/32x32folk.png',
     spritesheetData: f8SpritesheetData,
-    speed: 0.
+    speed: 0.19,
+  },
+  {
+    name: 'c1',
+    textureUrl: '/assets/GrayCat.webp',
+    spritesheetData: c1SpritesheetData,
+    speed: 0.19,
   },
+
 ];

 // Characters move at 0.75 tiles per second.
-export const movementSpeed =
+export const movementSpeed = 2;
patches/public/assets/GrayCat.webp
ADDED
patches/src/App.tsx
CHANGED
@@ -13,17 +13,16 @@ import ReactModal from 'react-modal';
 import MusicButton from './components/buttons/MusicButton.tsx';
 import Button from './components/buttons/Button.tsx';
 import InteractButton from './components/buttons/InteractButton.tsx';
-import OAuthLogin from './components//buttons/OAuthLogin.tsx';
 import FreezeButton from './components/FreezeButton.tsx';
 import { MAX_HUMAN_PLAYERS } from '../convex/constants.ts';
 import PoweredByConvex from './components/PoweredByConvex.tsx';
-
+import OAuthLogin from './components/buttons/OAuthLogin.tsx';
 export default function Home() {
   const [helpModalOpen, setHelpModalOpen] = useState(false);
   return (
     <main className="relative flex min-h-screen flex-col items-center justify-between font-body game-background">

-      <ReactModal
+      <ReactModal
         isOpen={helpModalOpen}
         onRequestClose={() => setHelpModalOpen(false)}
         style={modalStyles}
@@ -66,29 +65,26 @@ export default function Home() {
             minutes, you'll be automatically removed from the game.
           </p>
         </div>
-
-        {/*<div className="p-3 absolute top-0 right-0 z-10 text-2xl">
-          <Authenticated>
-            <UserButton afterSignOutUrl="/ai-town" />
-          </Authenticated>
+      </ReactModal>

-          <Unauthenticated>
-            <LoginButton />
-          </Unauthenticated>
-        </div> */}

-      <div className="w-full lg:h-screen min-h-screen relative isolate overflow-hidden shadow-2xl flex flex-col justify-start">
+      <div className="w-full lg:h-screen min-h-screen relative isolate overflow-hidden lg:p-8 shadow-2xl flex flex-col justify-start">
+        <div className="flex gap-4 flex-grow pointer-events-none">
+          <OAuthLogin />
+        </div>
         <Game />

         <footer className="justify-end bottom-0 left-0 w-full flex items-center mt-4 gap-3 p-6 flex-wrap pointer-events-none">
           <div className="flex gap-4 flex-grow pointer-events-none">
+            <FreezeButton />
             <MusicButton />
-
+            <Button href="https://github.com/a16z-infra/ai-town" imgUrl={starImg}>
+              Star
+            </Button>
             <InteractButton />
             <Button imgUrl={helpImg} onClick={() => setHelpModalOpen(true)}>
               Help
             </Button>
-            <OAuthLogin />
             <div id="footer-buttons"/>
           </div>
         </footer>
patches/src/components/Character.tsx
CHANGED
@@ -50,7 +50,7 @@ export const Character = ({
       setSpriteSheet(sheet);
     };
     void parseSheet();
-  }, []);
+  }, [textureUrl]);

   // The first "left" is "right" but reflected.
   const roundedOrientation = Math.floor(orientation / 90);
@@ -85,10 +85,10 @@ export const Character = ({

   return (
     <Container x={x} y={y} interactive={true} pointerdown={onClick} cursor="pointer">
-      {isThinking && (
+      {/* {isThinking && (
         // TODO: We'll eventually have separate assets for thinking and speech animations.
         <Text x={-20} y={-10} scale={{ x: -0.8, y: 0.8 }} text={'💭'} anchor={{ x: 0.5, y: 0.5 }} />
-      )}
+      )} */}
       {isSpeaking && (
         // TODO: We'll eventually have separate assets for thinking and speech animations.
         <Text x={18} y={-10} scale={0.8} text={'💬'} anchor={{ x: 0.5, y: 0.5 }} />
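The dependency-array change matters because the sprite sheet is parsed inside an effect: with [] it would only ever be parsed for the first textureUrl, which would break the werewolf night-skin swap done in Player.tsx below. A minimal sketch of the pattern (not the repo's actual component):

// Minimal sketch of the effect-dependency pattern fixed above; not the actual component.
import { useEffect, useState } from 'react';

function useParsedSheet(textureUrl: string) {
  const [sheetUrl, setSheetUrl] = useState<string | null>(null);
  useEffect(() => {
    let cancelled = false;
    // Stand-in for the async spritesheet parsing done in Character.tsx.
    Promise.resolve(textureUrl).then((url) => {
      if (!cancelled) setSheetUrl(url);
    });
    return () => {
      cancelled = true;
    };
  }, [textureUrl]); // re-parse whenever the texture changes, as in the patch
  return sheetUrl;
}

export default useParsedSheet;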
patches/src/components/Game.tsx
CHANGED
@@ -92,7 +92,7 @@ export default function Game() {
   const scrollViewRef = useRef<HTMLDivElement>(null);

   const humanTokenIdentifier = useQuery(api.world.userStatus, worldId ? { worldId } : 'skip');
-  if (!worldId || !engineId || !game ) {
+  if (!worldId || !engineId || !game || !humanTokenIdentifier) {
     return null;
   }
   const playerId = [...game.world.players.values()].find(
patches/src/components/PixiGame.tsx
CHANGED
@@ -34,6 +34,8 @@ export const PixiGame = (props: {
     (p) => p.human === humanTokenIdentifier,
   )?.id;

+
+
   const moveTo = useSendInput(props.engineId, 'moveTo');

   // Interaction for clicking on the world to navigate.
patches/src/components/Player.tsx
CHANGED
@@ -30,11 +30,14 @@ export const Player = ({
   historicalTime?: number;
 }) => {
   const playerCharacter = game.playerDescriptions.get(player.id)?.character;
-
+  if (!playerCharacter) {
     throw new Error(`Player ${player.id} has no character`);
   }
-
-
+  let character = characters.find((c) => c.name === playerCharacter);
+  // If it's night, use the night version of the character
+  if (game.world.gameCycle.cycleState === 'Night' && game.playerDescriptions.get(player.id)?.type === 'werewolf') {
+    character = characters.find((c) => c.name === 'c1');
+  }
   const locationBuffer = game.world.historicalLocations?.get(player.id);
   const historicalLocation = useHistoricalValue<Location>(
     locationFields,
@@ -42,7 +45,7 @@ export const Player = ({
     playerLocation(player),
     locationBuffer,
   );
-
+  if (!character) {
     if (!logged.has(playerCharacter)) {
       logged.add(playerCharacter);
       toast.error(`Unknown character ${playerCharacter}`);
patches/src/components/buttons/InteractButton.tsx
CHANGED
@@ -19,7 +19,7 @@ export default function InteractButton() {
   const oauthToken = oauth ? oauth.userInfo.fullname : undefined;
   const humanTokenIdentifier = useQuery(api.world.userStatus, worldId ? { worldId, oauthToken } : 'skip');
   const userPlayerId =
-
+    game && [...game.world.players.values()].find((p) => p.human === humanTokenIdentifier)?.id;
   const join = useMutation(api.world.joinWorld);
   const leave = useMutation(api.world.leaveWorld);
   const isPlaying = !!userPlayerId;
@@ -46,6 +46,7 @@ export default function InteractButton() {
     [convex, join, oauthToken],
   );

+
   const joinOrLeaveGame = () => {
     if (
       !worldId ||
|