from fastapi import FastAPI
from pydantic import BaseModel
import requests
import os

# Minimal gateway that forwards a prompt to either the Gemini or the Groq
# API, gated by a shared key read from the `key` environment variable.
app = FastAPI()


class RequestBody(BaseModel):
    model: str
    key_body: str
    text: str


@app.post("/api/v1")
async def generate_response(request_body: RequestBody):
    input_text = request_body.text
    model = request_body.model
    key_true = os.environ['key']
    key_body = request_body.key_body

    if key_body == key_true:
        if model == "gemini":
            # Forward the prompt to the Gemini generateContent endpoint.
            key_gemini = os.environ['key_gemini']
            headers = {'Content-Type': 'application/json'}
            params = {'key': key_gemini}
            json_data = {'contents': [{'parts': [{'text': input_text}]}]}
            response = requests.post(
                'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash-latest:generateContent',
                params=params,
                headers=headers,
                json=json_data,
            )
            all_chunk = response.json()['candidates'][0]['content']['parts'][0]['text']
        elif model == 'groq':
            # Forward the prompt to Groq's OpenAI-compatible chat completions endpoint.
            key_groq = os.environ['key_groq']
            headers = {'Authorization': f'Bearer {key_groq}', 'Content-Type': 'application/json'}
            json_data = {
                'messages': [{'role': 'user', 'content': input_text}],
                'model': 'llama-3.1-70b-versatile',
            }
            response = requests.post('https://api.groq.com/openai/v1/chat/completions', headers=headers, json=json_data)
            all_chunk = response.json()["choices"][0]["message"]["content"]
        else:
            # Unknown model name: return a clear message instead of raising
            # an UnboundLocalError on `all_chunk`.
            all_chunk = f"Unsupported model: {model}"
    else:
        # Shared key did not match.
        all_chunk = "How's the hack going?"
    return {"response": all_chunk}