DeepLearning101 committed on
Commit
0233ec6
1 Parent(s): 96e7487

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -49
app.py CHANGED
@@ -2,13 +2,15 @@ import gradio as gr
2
  import requests
3
  import mimetypes
4
  import json, os
 
 
5
 
6
  LLM_API = os.environ.get("LLM_API")
7
  LLM_URL = os.environ.get("LLM_URL")
8
 
9
  USER_ID = "HuggingFace Space" # Placeholder user ID
10
 
11
- def send_chat_message(LLM_URL, LLM_API, category, file_id):
12
  payload = {
13
  "inputs": {},
14
  "query": category,
@@ -24,59 +26,55 @@ def send_chat_message(LLM_URL, LLM_API, category, file_id):
24
  ]
25
  }
26
  print("Sending chat message payload:", payload) # Debug information
27
- response = requests.post(
28
- f"{LLM_URL}/chat-messages",
29
- headers={"Authorization": f"Bearer {LLM_API}"},
30
- json=payload,
31
- stream=True # Enable streaming
32
- )
33
- print("Request URL:", f"{LLM_URL}/chat-messages")
34
- print("Response status code:", response.status_code)
35
- if response.status_code == 404:
36
- return "Error: Endpoint not found (404)"
37
-
38
- # Handle the stream of events
39
- last_thought = None
40
- try:
41
- for line in response.iter_lines(decode_unicode=True):
42
- if line:
43
- try:
44
- data = json.loads(line.split("data: ")[1])
45
- if data.get("event") == "agent_thought":
46
- last_thought = data.get("thought")
47
- except (IndexError, json.JSONDecodeError):
48
- continue
49
- except requests.exceptions.JSONDecodeError:
50
- return "Error: Invalid JSON response"
51
-
52
- if last_thought:
53
- # Structure the thought text
54
- return last_thought.strip()
55
- else:
56
- return "Error: No thought found in the response"
57
 
58
def upload_file(LLM_URL, LLM_API, file_path, user_id):
    """Upload a local file to the LLM service's /files/upload endpoint.

    Args:
        LLM_URL: Base URL of the LLM API service.
        LLM_API: Bearer token used for the Authorization header.
        file_path: Path of the local file to upload.
        user_id: Identifier forwarded as the "user" form field.

    Returns:
        The parsed JSON response (dict) on success, or an "Error: ..."
        string on failure (missing file, 404 endpoint, non-JSON body).
    """
    if not os.path.exists(file_path):
        return f"Error: File {file_path} not found"
    mime_type, _ = mimetypes.guess_type(file_path)
    with open(file_path, 'rb') as f:
        response = requests.post(
            f"{LLM_URL}/files/upload",
            headers={"Authorization": f"Bearer {LLM_API}"},
            # Send only the basename as the multipart filename rather than
            # the full local path, which the server has no use for.
            files={"file": (os.path.basename(file_path), f, mime_type)},
            data={"user": user_id},
        )
    print("Upload response status code:", response.status_code)  # Debug information
    if response.status_code == 404:
        return "Error: Endpoint not found (404)"
    print("Raw upload response text:", response.text)  # Debug information
    try:
        return response.json()
    except ValueError:
        # ValueError covers requests.exceptions.JSONDecodeError on modern
        # requests releases (it subclasses ValueError) and the plain JSON
        # decode errors raised by older releases where that exception
        # class does not exist.
        return "Error: Invalid JSON response"
 
77
 
78
- def handle_input(file_path, category):
79
- upload_response = upload_file(LLM_URL, LLM_API, file_path, USER_ID)
80
  print("Upload response:", upload_response) # Debug information
81
  if "error" in upload_response:
82
  return upload_response
@@ -84,7 +82,7 @@ def handle_input(file_path, category):
84
  if not file_id:
85
  return "Error: No file ID returned from upload"
86
 
87
- chat_response = send_chat_message(LLM_URL, LLM_API, category, file_id)
88
  print("Chat response:", chat_response) # Debug information
89
  return chat_response
90
 
 
2
  import requests
3
  import mimetypes
4
  import json, os
5
+ import asyncio
6
+ import aiohttp
7
 
8
  LLM_API = os.environ.get("LLM_API")
9
  LLM_URL = os.environ.get("LLM_URL")
10
 
11
  USER_ID = "HuggingFace Space" # Placeholder user ID
12
 
13
+ async def send_chat_message(LLM_URL, LLM_API, category, file_id):
14
  payload = {
15
  "inputs": {},
16
  "query": category,
 
26
  ]
27
  }
28
  print("Sending chat message payload:", payload) # Debug information
29
+ async with aiohttp.ClientSession() as session:
30
+ async with session.post(
31
+ f"{LLM_URL}/chat-messages",
32
+ headers={"Authorization": f"Bearer {LLM_API}"},
33
+ json=payload
34
+ ) as response:
35
+ print("Request URL:", f"{LLM_URL}/chat-messages")
36
+ print("Response status code:", response.status)
37
+ if response.status == 404:
38
+ return "Error: Endpoint not found (404)"
39
+
40
+ last_thought = None
41
+ async for line in response.content:
42
+ if line:
43
+ try:
44
+ data = json.loads(line.split(b"data: ")[1].decode("utf-8"))
45
+ if data.get("event") == "agent_thought":
46
+ last_thought = data.get("thought")
47
+ except (IndexError, json.JSONDecodeError):
48
+ continue
49
+
50
+ if last_thought:
51
+ return last_thought.strip()
52
+ else:
53
+ return "Error: No thought found in the response"
 
 
 
 
 
54
 
55
async def upload_file(LLM_URL, LLM_API, file_path, user_id):
    """Asynchronously upload a local file to the /files/upload endpoint.

    Args:
        LLM_URL: Base URL of the LLM API service.
        LLM_API: Bearer token used for the Authorization header.
        file_path: Path of the local file to upload.
        user_id: Identifier forwarded as the "user" form field.

    Returns:
        The parsed JSON response (dict) on success, or an "Error: ..."
        string on failure (missing file, 404 endpoint, non-JSON body).
    """
    if not os.path.exists(file_path):
        return f"Error: File {file_path} not found"
    mime_type, _ = mimetypes.guess_type(file_path)
    with open(file_path, 'rb') as f:
        # aiohttp does not understand requests-style
        # (filename, fileobj, content_type) tuples inside a plain form
        # dict; multipart uploads must go through aiohttp.FormData or the
        # request body fails to serialize.
        form = aiohttp.FormData()
        form.add_field(
            "file",
            f,
            filename=os.path.basename(file_path),
            content_type=mime_type or "application/octet-stream",
        )
        form.add_field("user", user_id)
        async with aiohttp.ClientSession() as session:
            async with session.post(
                f"{LLM_URL}/files/upload",
                headers={"Authorization": f"Bearer {LLM_API}"},
                data=form,
            ) as response:
                print("Upload response status code:", response.status)  # Debug information
                if response.status == 404:
                    return "Error: Endpoint not found (404)"
                response_text = await response.text()
                print("Raw upload response text:", response_text)  # Debug information
                try:
                    return json.loads(response_text)
                except json.JSONDecodeError:
                    return "Error: Invalid JSON response"
75
 
76
+ async def handle_input(file_path, category):
77
+ upload_response = await upload_file(LLM_URL, LLM_API, file_path, USER_ID)
78
  print("Upload response:", upload_response) # Debug information
79
  if "error" in upload_response:
80
  return upload_response
 
82
  if not file_id:
83
  return "Error: No file ID returned from upload"
84
 
85
+ chat_response = await send_chat_message(LLM_URL, LLM_API, category, file_id)
86
  print("Chat response:", chat_response) # Debug information
87
  return chat_response
88