Merge pull request #64 from noobydp/main
Browse files — Further changes to support OLLAMA_API_BASE_URL with Ollama models
- .gitignore +1 -1
- app/lib/.server/llm/api-key.ts +2 -0
- app/lib/.server/llm/model.ts +7 -4
.gitignore
CHANGED
|
@@ -12,7 +12,7 @@ dist-ssr
|
|
| 12 |
*.local
|
| 13 |
|
| 14 |
.vscode/*
|
| 15 |
-
|
| 16 |
!.vscode/extensions.json
|
| 17 |
.idea
|
| 18 |
.DS_Store
|
|
|
|
| 12 |
*.local
|
| 13 |
|
| 14 |
.vscode/*
|
| 15 |
+
.vscode/launch.json
|
| 16 |
!.vscode/extensions.json
|
| 17 |
.idea
|
| 18 |
.DS_Store
|
app/lib/.server/llm/api-key.ts
CHANGED
|
@@ -34,6 +34,8 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
|
|
| 34 |
switch (provider) {
|
| 35 |
case 'OpenAILike':
|
| 36 |
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
|
|
|
|
|
|
|
| 37 |
default:
|
| 38 |
return "";
|
| 39 |
}
|
|
|
|
| 34 |
switch (provider) {
|
| 35 |
case 'OpenAILike':
|
| 36 |
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
|
| 37 |
+
case 'Ollama':
|
| 38 |
+
return env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL;
|
| 39 |
default:
|
| 40 |
return "";
|
| 41 |
}
|
app/lib/.server/llm/model.ts
CHANGED
|
@@ -57,6 +57,12 @@ export function getGroqModel(apiKey: string, model: string) {
|
|
| 57 |
return openai(model);
|
| 58 |
}
|
| 59 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 60 |
export function getDeepseekModel(apiKey: string, model: string){
|
| 61 |
const openai = createOpenAI({
|
| 62 |
baseURL: 'https://api.deepseek.com/beta',
|
|
@@ -65,9 +71,6 @@ export function getDeepseekModel(apiKey: string, model: string){
|
|
| 65 |
|
| 66 |
return openai(model);
|
| 67 |
}
|
| 68 |
-
export function getOllamaModel(model: string) {
|
| 69 |
-
return ollama(model);
|
| 70 |
-
}
|
| 71 |
|
| 72 |
export function getOpenRouterModel(apiKey: string, model: string) {
|
| 73 |
const openRouter = createOpenRouter({
|
|
@@ -99,6 +102,6 @@ export function getModel(provider: string, model: string, env: Env) {
|
|
| 99 |
case 'Mistral':
|
| 100 |
return getMistralModel(apiKey, model);
|
| 101 |
default:
|
| 102 |
-
return getOllamaModel(model);
|
| 103 |
}
|
| 104 |
}
|
|
|
|
| 57 |
return openai(model);
|
| 58 |
}
|
| 59 |
|
| 60 |
+
export function getOllamaModel(baseURL: string, model: string) {
|
| 61 |
+
let Ollama = ollama(model);
|
| 62 |
+
Ollama.config.baseURL = `${baseURL}/api`;
|
| 63 |
+
return Ollama;
|
| 64 |
+
}
|
| 65 |
+
|
| 66 |
export function getDeepseekModel(apiKey: string, model: string){
|
| 67 |
const openai = createOpenAI({
|
| 68 |
baseURL: 'https://api.deepseek.com/beta',
|
|
|
|
| 71 |
|
| 72 |
return openai(model);
|
| 73 |
}
|
|
|
|
|
|
|
|
|
|
| 74 |
|
| 75 |
export function getOpenRouterModel(apiKey: string, model: string) {
|
| 76 |
const openRouter = createOpenRouter({
|
|
|
|
| 102 |
case 'Mistral':
|
| 103 |
return getMistralModel(apiKey, model);
|
| 104 |
default:
|
| 105 |
+
return getOllamaModel(baseURL, model);
|
| 106 |
}
|
| 107 |
}
|