rkihacker committed on
Commit
bf8f720
·
verified ·
1 Parent(s): ac56ec3

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +75 -74
main.py CHANGED
@@ -1,6 +1,6 @@
1
- from fastapi import FastAPI, HTTPException, Request
2
  from fastapi.responses import StreamingResponse
3
- from pydantic import BaseModel, Field
4
  import httpx
5
  import os
6
  import json
@@ -13,23 +13,27 @@ from typing import List, Dict, Any, Optional, AsyncGenerator
13
  INFERENCE_API_KEY = os.environ.get("INFERENCE_API_KEY", "inference-00050468cc1c4a20bd5ca0997c752329")
14
  INFERENCE_API_URL = "https://api.inference.net/v1/chat/completions"
15
  SEARCH_API_URL = "https://rkihacker-brave.hf.space/search"
16
- NEWS_API_URL = "https://rkihacker-brave.hf.space/news" # Added News API URL
 
17
  MODEL_NAME = "Binglity-Lite"
18
  BACKEND_MODEL = "meta-llama/llama-3.1-8b-instruct/fp-8"
19
 
20
  # --- Final Advanced System Prompt ---
21
  SYSTEM_PROMPT = """
22
- You are "Binglity-Lite", a highly advanced AI search assistant. Your purpose is to provide users with accurate, comprehensive, and trustworthy answers by synthesizing information from a given set of web and news search results.
 
23
  **Core Directives:**
24
- 1. **Answer Directly**: Immediately address the user's question. **Do not** use introductory phrases like "Based on the search results..." or "Here is the information I found...". Your tone should be confident, objective, and encyclopedic.
25
  2. **Synthesize, Don't Summarize**: Your primary task is to weave information from multiple sources into a single, cohesive, and well-structured answer. Do not simply describe what each source says one by one.
26
  3. **Cite with Inline Markdown Links**: This is your most important instruction. When you present a fact or a piece of information from a source, you **must** cite it immediately using an inline Markdown link.
27
  * **Format**: The format must be `[phrase or sentence containing the fact](URL)`. The URL must come from the `URL:` field of the provided source.
28
  * **Example**: If a source with URL `https://example.com/science` says "The Earth is the third planet from the Sun", your output should be: "The Earth is the [third planet from the Sun](https://example.com/science)."
29
  * **Rule**: Every piece of information in your answer must be attributable to a source via these inline links.
30
- 4. **Be Fact-Based**: Your entire response must be based **exclusively** on the information provided in the web and news search results. Do not use any outside knowledge.
31
- 5. **Filter for Relevance**: If a search result is not relevant to the user's query, ignore it completely. Do not mention it in your response.
32
- 6. **Handle Ambiguity**: If the search results are contradictory or insufficient to answer the question fully, state this clearly in your response, citing the conflicting sources.
 
 
33
  **Final Output Structure:**
34
  Your final response MUST be structured in two parts:
35
  1. **The Synthesized Answer**: A well-written response that directly answers the user's query, with facts and statements properly cited using inline Markdown links as described above.
@@ -41,8 +45,8 @@ Your final response MUST be structured in two parts:
41
  # --- FastAPI App ---
42
  app = FastAPI(
43
  title="Binglity-Lite API",
44
- description="A web search-powered, streaming-capable chat completions API.",
45
- version="1.3.0", # Version updated
46
  )
47
 
48
  # --- Pydantic Models for OpenAI Compatibility ---
@@ -57,49 +61,22 @@ class ChatCompletionRequest(BaseModel):
57
  temperature: Optional[float] = 0.7
58
  stream: Optional[bool] = False
59
 
60
- # --- Web Search Functions ---
61
- async def perform_web_search(query: str) -> List[Dict[str, Any]]:
62
- async with httpx.AsyncClient() as client:
63
- try:
64
- response = await client.get(
65
- SEARCH_API_URL,
66
- params={"query": query, "max_results": 10}
67
- )
68
- response.raise_for_status()
69
- results = response.json()
70
- # Add source type to each result
71
- for result in results:
72
- result['source_type'] = 'Web'
73
- return results
74
- except httpx.HTTPStatusError as e:
75
- print(f"Error from search API: {e.response.text}")
76
- return []
77
- except Exception as e:
78
- print(f"An unexpected error occurred during web search: {str(e)}")
79
- return []
80
-
81
- async def perform_news_search(query: str) -> List[Dict[str, Any]]:
82
- """Performs a search against the news API."""
83
- async with httpx.AsyncClient() as client:
84
- try:
85
- # Parameters can be adjusted as needed, e.g., region
86
- response = await client.get(
87
- NEWS_API_URL,
88
- params={"query": query, "max_results": 10, "region": "en-US"}
89
- )
90
- response.raise_for_status()
91
- results = response.json()
92
- # Add source type to each result
93
- for result in results:
94
- result['source_type'] = 'News'
95
- return results
96
- except httpx.HTTPStatusError as e:
97
- print(f"Error from news API: {e.response.text}")
98
- return []
99
- except Exception as e:
100
- print(f"An unexpected error occurred during news search: {str(e)}")
101
- return []
102
-
103
 
104
  def format_search_results_for_prompt(results: List[Dict[str, Any]]) -> str:
105
  """Formats combined search results for the language model prompt."""
@@ -108,15 +85,22 @@ def format_search_results_for_prompt(results: List[Dict[str, Any]]) -> str:
108
 
109
  formatted = "### Search Results ###\n\n"
110
  for i, result in enumerate(results):
111
- source_type = result.get('source_type', 'Search') # Default in case it's missing
112
  formatted += f"Source [{i+1}] ({source_type}):\n"
113
  formatted += f"Title: {result.get('title', 'N/A')}\n"
114
  formatted += f"URL: {result.get('url', 'N/A')}\n"
115
- formatted += f"Content: {result.get('description', 'N/A')}\n\n"
 
 
 
 
 
 
116
  return formatted
117
 
118
  # --- Streaming Logic ---
119
  async def stream_response_generator(payload: Dict[str, Any]) -> AsyncGenerator[str, None]:
 
120
  headers = {
121
  "Authorization": f"Bearer {INFERENCE_API_KEY}",
122
  "Content-Type": "application/json",
@@ -133,7 +117,7 @@ async def stream_response_generator(payload: Dict[str, Any]) -> AsyncGenerator[s
133
 
134
  async for line in response.aiter_lines():
135
  if line.startswith("data:"):
136
- line_data = line[5:].strip()
137
  if line_data == "[DONE]":
138
  yield f"data: {json.dumps({'id': response_id, 'model': MODEL_NAME, 'object': 'chat.completion.chunk', 'created': created_time, 'choices': [{'index': 0, 'delta': {}, 'finish_reason': 'stop'}]})}\n\n"
139
  yield "data: [DONE]\n\n"
@@ -141,12 +125,18 @@ async def stream_response_generator(payload: Dict[str, Any]) -> AsyncGenerator[s
141
 
142
  try:
143
  chunk = json.loads(line_data)
144
- formatted_chunk = {
145
- "id": response_id, "object": "chat.completion.chunk", "created": created_time, "model": MODEL_NAME,
146
- "choices": [{"index": 0, "delta": chunk["choices"][0].get("delta", {}), "finish_reason": chunk["choices"][0].get("finish_reason")}]
147
- }
148
- yield f"data: {json.dumps(formatted_chunk)}\n\n"
149
- except json.JSONDecodeError:
 
 
 
 
 
 
150
  continue
151
 
152
  # --- API Endpoint ---
@@ -159,24 +149,28 @@ async def chat_completions(request: ChatCompletionRequest):
159
  if not user_query or request.messages[-1].role.lower() != 'user':
160
  raise HTTPException(status_code=400, detail="The last message must be from the 'user' and contain content.")
161
 
162
- # Perform web and news searches concurrently
163
- web_results, news_results = await asyncio.gather(
164
- perform_web_search(user_query),
165
- perform_news_search(user_query)
166
- )
 
 
 
167
 
168
  # Combine results and remove duplicates by URL
169
  combined_results = []
170
  seen_urls = set()
171
- for result in web_results + news_results:
172
- url = result.get('url')
173
- if url and url not in seen_urls:
174
- combined_results.append(result)
175
- seen_urls.add(url)
 
176
 
177
  formatted_results = format_search_results_for_prompt(combined_results)
178
 
179
- final_user_prompt = f"User's question: \"{user_query}\"\n\nUse the web and news search results below to answer the user's question. Follow all rules in your system prompt exactly.\n\n{formatted_results}"
180
 
181
  payload = {
182
  "model": BACKEND_MODEL,
@@ -184,7 +178,9 @@ async def chat_completions(request: ChatCompletionRequest):
184
  {"role": "system", "content": SYSTEM_PROMPT},
185
  {"role": "user", "content": final_user_prompt},
186
  ],
187
- "max_tokens": request.max_tokens, "temperature": request.temperature, "stream": request.stream,
 
 
188
  }
189
 
190
  if request.stream:
@@ -196,6 +192,11 @@ async def chat_completions(request: ChatCompletionRequest):
196
  response = await client.post(INFERENCE_API_URL, json=payload, headers=headers)
197
  response.raise_for_status()
198
  model_response = response.json()
 
 
 
 
 
199
  return {
200
  "id": model_response.get("id", f"chatcmpl-{uuid.uuid4()}"), "object": "chat.completion", "created": model_response.get("created", int(time.time())), "model": MODEL_NAME,
201
  "choices": [{"index": 0, "message": {"role": "assistant", "content": model_response["choices"][0]["message"]["content"],}, "finish_reason": "stop",}],
 
1
+ from fastapi import FastAPI, HTTPException
2
  from fastapi.responses import StreamingResponse
3
+ from pydantic import BaseModel
4
  import httpx
5
  import os
6
  import json
 
13
# --- Configuration ---
# NOTE(review): the hardcoded API-key fallback below is a committed secret —
# rotate this key and drop the default so a missing env var fails loudly.
INFERENCE_API_KEY = os.environ.get("INFERENCE_API_KEY", "inference-00050468cc1c4a20bd5ca0997c752329")
INFERENCE_API_URL = "https://api.inference.net/v1/chat/completions"
# Search backends: web, news, and image endpoints of the same service.
SEARCH_API_URL = "https://rkihacker-brave.hf.space/search"
NEWS_API_URL = "https://rkihacker-brave.hf.space/news"
IMAGE_API_URL = "https://rkihacker-brave.hf.space/images"  # Added Image API URL
MODEL_NAME = "Binglity-Lite"  # model name reported back to API clients
BACKEND_MODEL = "meta-llama/llama-3.1-8b-instruct/fp-8"  # model actually invoked upstream
20
 
21
  # --- Final Advanced System Prompt ---
22
  SYSTEM_PROMPT = """
23
+ You are "Binglity-Lite", a highly advanced AI search assistant. Your purpose is to provide users with accurate, comprehensive, and trustworthy answers by synthesizing information from a given set of web, news, and image search results.
24
+
25
  **Core Directives:**
26
+ 1. **Answer Directly**: Immediately address the user's question. **Do not** use introductory phrases like "Based on the search results...". Your tone should be confident, objective, and encyclopedic.
27
  2. **Synthesize, Don't Summarize**: Your primary task is to weave information from multiple sources into a single, cohesive, and well-structured answer. Do not simply describe what each source says one by one.
28
  3. **Cite with Inline Markdown Links**: This is your most important instruction. When you present a fact or a piece of information from a source, you **must** cite it immediately using an inline Markdown link.
29
  * **Format**: The format must be `[phrase or sentence containing the fact](URL)`. The URL must come from the `URL:` field of the provided source.
30
  * **Example**: If a source with URL `https://example.com/science` says "The Earth is the third planet from the Sun", your output should be: "The Earth is the [third planet from the Sun](https://example.com/science)."
31
  * **Rule**: Every piece of information in your answer must be attributable to a source via these inline links.
32
+ 4. **Be Fact-Based**: Your entire response must be based **exclusively** on the information provided in the search results. Do not use any outside knowledge.
33
+ 5. **Interpret Image Results**: For image search results, use the title and context to describe the image if it's relevant to the user's query. Cite the source page URL.
34
+ 6. **Filter for Relevance**: If a search result is not relevant to the user's query, ignore it completely. Do not mention it in your response.
35
+ 7. **Handle Ambiguity**: If the search results are contradictory or insufficient to answer the question fully, state this clearly in your response, citing the conflicting sources.
36
+
37
  **Final Output Structure:**
38
  Your final response MUST be structured in two parts:
39
  1. **The Synthesized Answer**: A well-written response that directly answers the user's query, with facts and statements properly cited using inline Markdown links as described above.
 
45
# --- FastAPI App ---
# Application object; this metadata is surfaced in the auto-generated
# OpenAPI/Swagger docs.
app = FastAPI(
    title="Binglity-Lite API",
    description="A web, news, and image search-powered, streaming-capable chat completions API.",
    version="1.4.0",
)
51
 
52
  # --- Pydantic Models for OpenAI Compatibility ---
 
61
  temperature: Optional[float] = 0.7
62
  stream: Optional[bool] = False
63
 
64
# --- Search Functions ---
async def perform_search(
    client: httpx.AsyncClient,
    url: str,
    query: str,
    source_type: str,
    max_results: int = 10,
) -> List[Dict[str, Any]]:
    """Query one search backend and tag every hit with its origin.

    Args:
        client: Shared async HTTP client, reused across concurrent searches.
        url: Endpoint to query (web, news, or image search API).
        query: The user's search terms.
        source_type: Label ('Web', 'News', 'Image') stamped onto each result
            so the prompt formatter can render each kind appropriately.
        max_results: Upper bound on hits requested from the backend.
            Previously hardcoded to 10; the default preserves old behavior.

    Returns:
        A list of result dicts, each with a 'source_type' key added, or an
        empty list on any failure so one broken backend never sinks the
        combined search.
    """
    try:
        response = await client.get(url, params={"query": query, "max_results": max_results})
        response.raise_for_status()
        results = response.json()
        # Guard against a backend returning a non-list payload (e.g. an
        # error object); iterating it below would mislabel or crash.
        if not isinstance(results, list):
            print(f"Unexpected payload from {source_type} API: {results!r}")
            return []
        for result in results:
            result['source_type'] = source_type
        return results
    except httpx.HTTPStatusError as e:
        print(f"Error from {source_type} API: {e.response.text}")
        return []
    except Exception as e:
        # Broad catch is deliberate: a failed search degrades to "no results"
        # instead of failing the whole chat completion.
        print(f"An unexpected error occurred during {source_type} search: {str(e)}")
        return []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80
 
81
  def format_search_results_for_prompt(results: List[Dict[str, Any]]) -> str:
82
  """Formats combined search results for the language model prompt."""
 
85
 
86
  formatted = "### Search Results ###\n\n"
87
  for i, result in enumerate(results):
88
+ source_type = result.get('source_type', 'Search')
89
  formatted += f"Source [{i+1}] ({source_type}):\n"
90
  formatted += f"Title: {result.get('title', 'N/A')}\n"
91
  formatted += f"URL: {result.get('url', 'N/A')}\n"
92
+
93
+ if source_type == 'Image':
94
+ formatted += f"Content: [Image Result] A picture titled '{result.get('title', 'N/A')}'\n"
95
+ formatted += f"Image URL: {result.get('image', 'N/A')}\n\n"
96
+ else:
97
+ formatted += f"Content: {result.get('description', 'N/A')}\n\n"
98
+
99
  return formatted
100
 
101
  # --- Streaming Logic ---
102
  async def stream_response_generator(payload: Dict[str, Any]) -> AsyncGenerator[str, None]:
103
+ """Generates server-sent events for streaming responses."""
104
  headers = {
105
  "Authorization": f"Bearer {INFERENCE_API_KEY}",
106
  "Content-Type": "application/json",
 
117
 
118
  async for line in response.aiter_lines():
119
  if line.startswith("data:"):
120
+ line_data = line[len("data:"):].strip()
121
  if line_data == "[DONE]":
122
  yield f"data: {json.dumps({'id': response_id, 'model': MODEL_NAME, 'object': 'chat.completion.chunk', 'created': created_time, 'choices': [{'index': 0, 'delta': {}, 'finish_reason': 'stop'}]})}\n\n"
123
  yield "data: [DONE]\n\n"
 
125
 
126
  try:
127
  chunk = json.loads(line_data)
128
+ # **ERROR FIX**: Check if 'choices' exists and is not empty before accessing
129
+ if chunk.get("choices") and len(chunk["choices"]) > 0:
130
+ formatted_chunk = {
131
+ "id": response_id, "object": "chat.completion.chunk", "created": created_time, "model": MODEL_NAME,
132
+ "choices": [{
133
+ "index": 0,
134
+ "delta": chunk["choices"][0].get("delta", {}),
135
+ "finish_reason": chunk["choices"][0].get("finish_reason")
136
+ }]
137
+ }
138
+ yield f"data: {json.dumps(formatted_chunk)}\n\n"
139
+ except (json.JSONDecodeError, IndexError):
140
  continue
141
 
142
  # --- API Endpoint ---
 
149
  if not user_query or request.messages[-1].role.lower() != 'user':
150
  raise HTTPException(status_code=400, detail="The last message must be from the 'user' and contain content.")
151
 
152
+ # Perform all searches concurrently
153
+ async with httpx.AsyncClient() as client:
154
+ search_tasks = [
155
+ perform_search(client, SEARCH_API_URL, user_query, "Web"),
156
+ perform_search(client, NEWS_API_URL, user_query, "News"),
157
+ perform_search(client, IMAGE_API_URL, user_query, "Image"),
158
+ ]
159
+ all_results = await asyncio.gather(*search_tasks)
160
 
161
  # Combine results and remove duplicates by URL
162
  combined_results = []
163
  seen_urls = set()
164
+ for result_list in all_results:
165
+ for result in result_list:
166
+ url = result.get('url')
167
+ if url and url not in seen_urls:
168
+ combined_results.append(result)
169
+ seen_urls.add(url)
170
 
171
  formatted_results = format_search_results_for_prompt(combined_results)
172
 
173
+ final_user_prompt = f"User's question: \"{user_query}\"\n\nUse the web, news, and image search results below to answer the user's question. Follow all rules in your system prompt exactly.\n\n{formatted_results}"
174
 
175
  payload = {
176
  "model": BACKEND_MODEL,
 
178
  {"role": "system", "content": SYSTEM_PROMPT},
179
  {"role": "user", "content": final_user_prompt},
180
  ],
181
+ "max_tokens": request.max_tokens,
182
+ "temperature": request.temperature,
183
+ "stream": request.stream,
184
  }
185
 
186
  if request.stream:
 
192
  response = await client.post(INFERENCE_API_URL, json=payload, headers=headers)
193
  response.raise_for_status()
194
  model_response = response.json()
195
+
196
+ # Ensure the response structure is valid before returning
197
+ if not model_response.get("choices") or len(model_response["choices"]) == 0:
198
+ raise HTTPException(status_code=500, detail="Invalid response from inference API: 'choices' field is missing or empty.")
199
+
200
  return {
201
  "id": model_response.get("id", f"chatcmpl-{uuid.uuid4()}"), "object": "chat.completion", "created": model_response.get("created", int(time.time())), "model": MODEL_NAME,
202
  "choices": [{"index": 0, "message": {"role": "assistant", "content": model_response["choices"][0]["message"]["content"],}, "finish_reason": "stop",}],