Upload app.py
app.py (changed)
@@ -178,25 +178,71 @@ def stream_huggingface(python, model_name):
             timeout=60
         )
 
+        # Check if response body is empty
+        if not response.text or len(response.text.strip()) == 0:
+            yield f"⏳ Model is loading or initializing...\n\n"
+            yield f"This happens on first use. Please try again in 30-60 seconds.\n\n"
+            yield f"💡 Quick alternative: Use GPT-4o or Claude-3.5-Sonnet (instant results!)"
+            return
+
         if response.status_code == 200:
+            try:
+                result = response.json()
+                if isinstance(result, list) and len(result) > 0:
+                    generated_text = result[0].get("generated_text", "")
+                else:
+                    generated_text = result.get("generated_text", "")
+
+                if not generated_text or len(generated_text.strip()) == 0:
+                    yield f"⚠️ Model returned empty response.\n\n"
+                    yield f"Try again or use GPT-4o/Claude-3.5-Sonnet instead."
+                    return
+
+                # Clean up the response
+                reply = generated_text.replace('```cpp\n','').replace('```','')
+                yield reply
+            except ValueError as json_err:
+                # JSON parsing failed
+                yield f"⚠️ Model response format error.\n\n"
+                yield f"The model might still be warming up. Try again in 30 seconds.\n\n"
+                yield f"💡 Or use GPT-4o/Claude-3.5-Sonnet for instant results!"
+
+        elif response.status_code == 401 or response.status_code == 403:
+            # Authentication error - need HF token
+            yield f"🔒 Authentication Required!\n\n"
+            yield f"To use open-source models, you need a FREE Hugging Face token:\n\n"
+            yield f"1. Get token: https://huggingface.co/settings/tokens\n"
+            yield f"2. Add HF_TOKEN secret in Space Settings\n"
+            yield f"3. Factory reboot\n\n"
+            yield f"OR use GPT-4o/Claude-3.5-Sonnet instead (they work now!)"
+        elif response.status_code == 503:
+            # Service unavailable - model loading
+            yield f"⏳ Model is currently loading (cold start)...\n\n"
+            yield f"This can take 30-60 seconds on first use.\n"
+            yield f"Please wait a minute and try again.\n\n"
+            yield f"💡 Quick solution: Use GPT-4o or Claude-3.5-Sonnet (no waiting!)"
         else:
+            try:
+                error_msg = response.json().get("error", "Unknown error")
+            except:
+                error_msg = response.text[:200] if response.text else "Empty response"
+
+            if "loading" in str(error_msg).lower():
                 yield f"⏳ Model is loading... This may take 20-30 seconds. Please try again."
             else:
-                yield f"❌ Error from Hugging Face
+                yield f"❌ Error from Hugging Face (HTTP {response.status_code}):\n{error_msg}\n\n"
+                yield f"💡 Tip: Use GPT-4o or Claude-3.5-Sonnet for now (they're working!)"
 
+    except requests.exceptions.Timeout:
+        yield f"⏱️ Request timed out.\n\n"
+        yield f"Model might be loading (cold start). Try again in 30-60 seconds.\n\n"
+        yield f"💡 Or use GPT-4o/Claude-3.5-Sonnet for instant results!"
+    except requests.exceptions.RequestException as req_err:
+        yield f"🌐 Network error: {str(req_err)}\n\n"
+        yield f"💡 Please use GPT-4o or Claude-3.5-Sonnet instead."
     except Exception as e:
-        yield f"❌
+        yield f"❌ Unexpected error: {str(e)}\n\n"
+        yield f"💡 Tip: Use GPT-4o or Claude-3.5-Sonnet for reliable results!"
 
 def optimize(python, model):
     """Convert Python to C++ using selected AI model"""
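For context, the hunk begins just below the requests.post(...) call that produces response; that call sits outside the diff. A minimal sketch of the assumed surrounding code follows. The Inference API URL, the HF_TOKEN header, the payload shape, and the placeholder model id are illustrative assumptions, not taken from app.py:

import os
import requests

def stream_huggingface(python, model_name):
    """Sketch only: the assumed request context around the patched error handling."""
    # Assumed endpoint, headers, and payload; the real app.py may build these differently.
    url = f"https://api-inference.huggingface.co/models/{model_name}"
    headers = {"Authorization": f"Bearer {os.environ.get('HF_TOKEN', '')}"}
    payload = {"inputs": f"Convert this Python code to C++:\n{python}"}

    try:
        response = requests.post(
            url,
            headers=headers,
            json=payload,
            timeout=60
        )
        # ... the patched handling of response.text and status codes from the diff goes here ...
        yield response.text
    except requests.exceptions.Timeout:
        yield "⏱️ Request timed out."

# The UI layer presumably consumes the generator chunk by chunk, e.g.
# (model id below is a placeholder, not from the app):
# for chunk in stream_huggingface("print('hello')", "some-org/some-model"):
#     print(chunk, end="")

The generator-based yield style matters here because a streaming UI (such as a Gradio interface, which Spaces apps commonly use) can display each yielded message as soon as it is produced, including the loading and error hints added by this commit.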