import json
import logging
import os
import sqlite3
import threading
import time
import uuid
from typing import Dict, Generator

import requests
from fastapi import FastAPI, Request, Response
from fastapi.responses import JSONResponse, StreamingResponse
app = FastAPI()
logger = logging.getLogger(__name__)
# 配置logging模块
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
lock = threading.Lock()

def get_db_connection():
    """Get a database connection with context manager"""
    return sqlite3.connect('logs.db')

def insert_to_db(request_id: str, data: dict, ret_data: str = None):
    """Insert or update log data into the database"""
    try:
        with lock, get_db_connection() as conn:
            cursor = conn.cursor()
            
            # Validate input data
            if not isinstance(data, dict):
                raise ValueError("Data must be a dictionary")
                
            # Check if there is an existing record with the same request_id
            cursor.execute("SELECT * FROM logs WHERE request_id=?", (request_id,))
            existing_record = cursor.fetchone()
            
            if existing_record:
                # If a record exists, update it
                cursor.execute('''UPDATE logs SET data=?, ret_data=? WHERE request_id=?''', 
                             (json.dumps(data, ensure_ascii=False), ret_data, request_id))
            else:
                # Otherwise, insert a new record
                if ret_data is not None:
                    cursor.execute('''INSERT INTO logs (request_id, data, ret_data) VALUES (?, ?, ?)''', 
                                 (request_id, json.dumps(data, ensure_ascii=False), ret_data)) 
                else:
                    cursor.execute('''INSERT INTO logs (request_id, data) VALUES (?, ?)''', 
                                 (request_id, json.dumps(data, ensure_ascii=False)))
    except Exception as e:
        logger.error(f"Database operation failed: {str(e)}")
        raise

def parse_all(data):
    role = None
    contents = []
    for chunk in data:
        if not (chunk.startswith('data:') and '[DONE]' not in chunk):
            continue
        try:
            chunk_data = json.loads(chunk[5:])
            delta = chunk_data['choices'][0]['delta']
            contents.append(delta['content'])
            if role is None:
                role = delta['role']
        except (IndexError, KeyError, ValueError) as e:
            logger.warning(f"Invalid JSON format in line '{chunk}': {str(e)}")
            continue
    return json.dumps({'role': role, 'content': ''.join(contents)}, ensure_ascii=False)

def generate_sse_events(url: str, data: Dict) -> Generator[str, None, None]:
    """从指定URL获取事件流并转发"""
    headers = {"content-type": "application/json"}
    try:
        request_id=str(uuid.uuid4())
        insert_to_db(request_id, data)  # write data to the logs table
        logger.info(f"request id {request_id} request: {data}")
        response = requests.post(url, json=data, headers=headers, stream=True, timeout=30)
        if response.status_code != 200:
            yield f"Event: error\nData: Request failed with status code {response.status_code}\n\n"
            return
    except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
        yield f"Event: error\nData: Request failed: {str(e)}\n\n"
        return
    response_data=[]
    for line in response.iter_lines():
        if line:
            response_data.append(line.decode('utf-8'))
            yield line.decode('utf-8')+'\n\n'
    # write ret_data to the logs table
    ret_data=parse_all(response_data)
    insert_to_db(request_id, data, ret_data)
    logger.info(f"request id {request_id} response: {ret_data}",)
    yield f"Event: response\nData: {ret_data}\n\n"

def generate_non_sse_response(url: str, data: Dict) -> Dict:
    """Handle non-SSE responses by collecting all data and returning at once"""
    headers = {"content-type": "application/json"}
    try:
        request_id = str(uuid.uuid4())
        insert_to_db(request_id, data)  # write data to the logs table
        logger.info(f"request id {request_id} request: {data}")
        response = requests.post(url, json=data, headers=headers, timeout=30)
        if response.status_code != 200:
            return {"error": f"Request failed with status code {response.status_code}"}
        
        response_data = response.json()
        ret_data = json.dumps(response_data, ensure_ascii=False)
        insert_to_db(request_id, data, ret_data)
        logger.info(f"request id {request_id} response: {ret_data}")
        return response_data
    except Exception as e:
        logger.error(f"Non-SSE request failed: {str(e)}")
        return {"error": str(e)}

@app.post("/v1/chat/completions")
async def handle_completions(request: Request):
    """Handle both SSE and non-SSE completions requests"""
    try:
        url = os.getenv('UPSTREAM_API_URL', "https://api.deepseek.com/v1/chat/completions")
        data = await request.json()
        
        # Basic input validation
        if not isinstance(data, dict):
            raise ValueError("Request body must be a JSON object")
            
        # Check if streaming is requested
        if data.get('stream', False):
            event_generator = generate_sse_events(url, data)
            return StreamingResponse(event_generator, 
                                   media_type="text/event-stream", 
                                   headers={"X-Request-ID": str(uuid.uuid4())})
        else:
            return generate_non_sse_response(url, data)
    except Exception as e:
        logger.error(f"Request processing failed: {str(e)}")
        return JSONResponse(status_code=400, content={"error": str(e)})

def initialize_db():
    """Initialize the database schema"""
    with get_db_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('''CREATE TABLE IF NOT EXISTS logs 
                        (request_id TEXT PRIMARY KEY, 
                         data TEXT, 
                         ret_data TEXT,
                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')
        conn.commit()

if __name__ == "__main__":
    import uvicorn
    import os
    
    # Initialize database
    initialize_db()
    
    # Start the server
    uvicorn.run(app, 
               host=os.getenv('HOST', '0.0.0.0'), 
               port=int(os.getenv('PORT', '8200')))
