import requests
import json
import sys
from datetime import datetime

def send_ollama_request(prompt, model="qwen2-0.5b", stream=True, timeout=60):
    """Send a chat request to a local Ollama server and return the reply.

    Args:
        prompt: User message text to send to the model.
        model: Ollama model name to use.
        stream: If True, consume Ollama's streaming NDJSON response and
            assemble the full answer text; if False, return the raw JSON
            body of the single response.
        timeout: Seconds to wait for connect/read before aborting.
            (New, defaulted — the original call had no timeout and could
            hang indefinitely on an unresponsive server.)

    Returns:
        dict: With stream=True, a structured result containing status,
        timestamp, the echoed request, and the concatenated response text.
        With stream=False, the server's JSON body as-is. On any failure,
        an error dict with "status": "error", an "error_type", the
        exception message, and a timestamp — this function never raises.
    """
    url = "http://localhost:11434/api/chat"

    data = {
        "model": model,
        "messages": [
            {
                "role": "user",
                "content": prompt
            }
        ],
        "stream": stream
    }

    try:
        # Context manager ensures the HTTP connection is released even if
        # JSON parsing fails partway through a streamed body (the original
        # never closed the streaming response).
        with requests.post(url, json=data, stream=stream, timeout=timeout) as response:
            response.raise_for_status()  # raise on HTTP 4xx/5xx

            if not stream:
                # Non-streaming: the whole reply is one JSON object.
                return response.json()

            # Streaming: each non-empty line is a standalone JSON chunk.
            # Collect content pieces and join once at the end instead of
            # repeated string concatenation.
            parts = []
            for line in response.iter_lines():
                if line:
                    chunk = json.loads(line.decode('utf-8'))
                    if 'message' in chunk and 'content' in chunk['message']:
                        parts.append(chunk['message']['content'])

            # Return the assembled, structured result.
            return {
                "status": "success",
                "timestamp": datetime.now().isoformat(),
                "request": {
                    "model": model,
                    "prompt": prompt
                },
                "response": "".join(parts)
            }

    except requests.exceptions.RequestException as e:
        # Covers connection errors, timeouts, and HTTP error statuses.
        return {
            "status": "error",
            "error_type": "request_error",
            "message": str(e),
            "timestamp": datetime.now().isoformat()
        }
    except json.JSONDecodeError as e:
        # A streamed line (or non-stream body) was not valid JSON.
        return {
            "status": "error",
            "error_type": "json_error",
            "message": str(e),
            "timestamp": datetime.now().isoformat()
        }
    except Exception as e:
        # Last-resort boundary handler: report instead of crashing the CLI.
        return {
            "status": "error",
            "error_type": "unknown_error",
            "message": str(e),
            "timestamp": datetime.now().isoformat()
        }

# Usage: python ollamaCurl.py "你是谁"
if __name__ == "__main__":
    # Take the prompt from the first CLI argument; fall back to a default.
    cli_args = sys.argv[1:]
    prompt = cli_args[0] if cli_args else "你好"

    # Send the request and collect the structured result.
    result = send_ollama_request(prompt)

    # Pretty-print as JSON, keeping non-ASCII characters readable.
    print(json.dumps(result, ensure_ascii=False, indent=2))