pvanand committed on
Commit 2e6b166
1 Parent(s): b4b055b

Update main.py

Files changed (1)
  1. main.py +7 -3
main.py CHANGED
@@ -55,11 +55,15 @@ class QueryModel(BaseModel):
 
 class NewsQueryModel(BaseModel):
     query: str = Field(..., description="News topic to search for")
-
+    model_id: ModelID = Field(
+        default="meta-llama/llama-3-70b-instruct",
+        description="ID of the model to use for response generation"
+    )
     class Config:
         schema_extra = {
             "example": {
-                "query": "Latest developments in AI"
+                "query": "Latest developments in AI",
+                "model_id": "meta-llama/llama-3-70b-instruct"
             }
         }
 
@@ -260,7 +264,7 @@ async def news_assistant(query: NewsQueryModel, api_key: str = Depends(verify_ap
         raise HTTPException(status_code=500, detail="Failed to fetch news data")
 
     def process_response():
-        for content in chat_with_llama_stream(messages, model="meta-llama/llama-3-70b-instruct"):
+        for content in chat_with_llama_stream(messages, model=query.model_id):
             yield content
     #meta-llama/llama-3-70b-instruct google/gemini-pro-1.5
     return StreamingResponse(process_response(), media_type="text/event-stream")
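With this change, each request to the news assistant can choose the generation model through the new model_id field, falling back to meta-llama/llama-3-70b-instruct when the field is omitted. Below is a minimal client sketch; the route path, port, and X-API-Key header name are assumptions for illustration and do not appear in this diff, while the request fields come from NewsQueryModel as updated in the commit.

import requests

# Request body matching NewsQueryModel: "query" is required; "model_id" is the
# field added in this commit and defaults to meta-llama/llama-3-70b-instruct.
payload = {
    "query": "Latest developments in AI",
    "model_id": "meta-llama/llama-3-70b-instruct",
}

with requests.post(
    "http://localhost:8000/news-assistant",  # hypothetical route and port
    json=payload,
    headers={"X-API-Key": "your-api-key"},   # hypothetical auth header name
    stream=True,
) as resp:
    resp.raise_for_status()
    # The endpoint responds with text/event-stream; print chunks as they arrive.
    for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
        if chunk:
            print(chunk, end="", flush=True)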