import asyncio
import contextlib  # used by the websocket handler when cancelling an in-flight task
import os
import time

import orjson
from fastapi import FastAPI, WebSocket, WebSocketDisconnect
from fastapi.responses import JSONResponse

app = FastAPI(title="Dummy Python AI", version="1.0.0")

START_TS = time.time()


def j(obj) -> str:
    """Serialize obj to a JSON string using orjson."""
    return orjson.dumps(obj).decode("utf-8")


@app.get("/health")
def health():
    return JSONResponse({
        "ok": True,
        "service": "dummy-ai",
        "uptime_sec": round(time.time() - START_TS, 2)
    })
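

# WebSocket protocol handled below:
#   incoming: {"type": "task", "text": "..."}  - start streaming a dummy answer
#             {"type": "cancel"}               - stop the answer currently streaming
#             {"type": "telemetry", ...}       - echoed back as a "log" message
#   outgoing: "ready", "log", "say", "token", "done", and "error" messages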
@app.websocket("/ws/ai") |
|
|
async def ws_ai(ws: WebSocket): |
|
|
await ws.accept() |
|
|
await ws.send_text(j({"type": "ready", "msg": "Dummy AI online"})) |
|
|
|
|
|
current_task = None |
|
|
current_cancel = asyncio.Event() |

    async def stream_dummy_answer(prompt: str):
        """Stream a staged, convincing dummy answer with tokens and say-cues."""
        try:
            await ws.send_text(j({"type":"log","msg":f"Received task: {prompt[:120]}"}))
            await asyncio.sleep(0.2)

            # Simulated work phases, each reported as a log line.
            phases = [
                "Analyzing your request",
                "Planning steps",
                "Executing subtask 1",
                "Executing subtask 2",
                "Compiling results",
            ]
            for ph in phases:
                if current_cancel.is_set(): return
                await ws.send_text(j({"type":"log","msg":ph}))
                await asyncio.sleep(0.35)

            answer = (
                "Sure — here’s a dummy streamed response to verify your end-to-end pipeline. "
                "I’m emitting short tokens so your client UI can show them live, "
                "and your TTS can speak them as they arrive."
            )

            await ws.send_text(j({"type":"say","text":"Starting response."}))

            # Emit the answer word by word so the client can render/speak it as it arrives.
            for token in answer.split(" "):
                if current_cancel.is_set(): return
                await ws.send_text(j({"type":"token","text":token + " "}))
                await asyncio.sleep(0.06)

            if current_cancel.is_set(): return

            await asyncio.sleep(0.15)
            await ws.send_text(j({"type":"say","text":"Response complete."}))
            await ws.send_text(j({"type":"done","result":"OK"}))

        except Exception as e:
            await ws.send_text(j({"type":"error","msg":str(e)}))

    # Main receive loop: dispatch on the message "type".
    try:
        while True:
            raw = await ws.receive_text()
            try:
                msg = orjson.loads(raw)
            except Exception:
                await ws.send_text(j({"type":"error","msg":"Invalid JSON"}))
                continue

            mtype = msg.get("type")

            if mtype == "telemetry":
                # Acknowledge telemetry by echoing it back as a log line.
                await ws.send_text(j({
                    "type":"log",
                    "msg": f"Telemetry cpu={msg.get('cpu')} mem={msg.get('mem')} active={msg.get('active_window')}"
                }))
                continue

            if mtype == "cancel":
                current_cancel.set()
                await ws.send_text(j({"type":"log","msg":"Cancel requested"}))
                continue

            if mtype == "task":
                # If an answer is already streaming, signal it to stop and cancel the task
                # before starting a fresh one with a new cancel event.
                if current_task and not current_task.done():
                    current_cancel.set()
                    with contextlib.suppress(asyncio.CancelledError):
                        current_task.cancel()
                    current_task = None
                    await asyncio.sleep(0.05)

                current_cancel = asyncio.Event()
                prompt = str(msg.get("text", "")).strip() or "(empty)"
                current_task = asyncio.create_task(stream_dummy_answer(prompt))
                continue

            await ws.send_text(j({"type":"error","msg":f"Unknown message type '{mtype}'"}))

    except WebSocketDisconnect:
        # Client went away; nothing more to do.
        return
    except Exception as e:
        try:
            await ws.send_text(j({"type":"error","msg":str(e)}))
        finally:
            return
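

# Entry point: run this file directly with Python, or serve the app with uvicorn's CLI
# (e.g. `uvicorn <module>:app --port 7860`, where <module> is this file's import path).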
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("PORT", "7860")))