import asyncio
from datetime import datetime
import logging
from ..core.providers import OpenAIProvider
from ..core.schemas import ModelParameter, ModelResponse, Message, ToolUseBlock
import json


from dataclasses import asdict

# Configure logging: timestamped records with file:line for easier tracing.
_LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s'
logging.basicConfig(level=logging.INFO, format=_LOG_FORMAT)

# Module-level logger for this manual test script.
logger = logging.getLogger('tests.cli')
# Provider selected for this manual test run.
provider = "openai"

# Name of the MCP server whose tools are exercised below.
mcp_server_name = "postgres-mcp"

# Parameters for an OpenAI-compatible endpoint (DashScope, qwen3-32b).
openai_model_parameter = {
    "model": "qwen3-32b",
    "title": "qwen3-32b-openai-compatible",
    "base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
    "provider": "openai",
    "extra_body": {"enable_thinking": False},
    "stream_options": {"include_usage": True},
}
# Conversation for the tool-calling smoke test: a system prompt followed by a
# user question (Chinese: "List all schemas the current database user can
# operate on?").
messages = [
    {
        "role": "system",
        "content": "You are a assistant with ability to use tool with function call and summary result for question.\n",
    },
    {
        "role": "user",
        "content": "\u5217\u51fa\u5f53\u524d\u6570\u636e\u5e93\u6240\u6709\u7528\u6237\u53ef\u4ee5\u64cd\u4f5c\u7684\u6a21\u5f0f(Schema)?",
    },
]
def _tool(name, description, properties=None, required=None):
    """Build one OpenAI-format function-tool definition.

    Key insertion order matches the hand-written literals this replaces
    ("properties", optional "required", then "type"), so serialized output
    is unchanged.
    """
    parameters = {"properties": {} if properties is None else properties}
    if required is not None:
        parameters["required"] = required
    parameters["type"] = "object"
    return {
        "function": {
            "name": name,
            "description": description,
            "parameters": parameters,
        },
        "type": "function",
    }


# Shared property templates; copied per-tool so no dict is aliased.
_SCHEMA_NAME_PROP = {
    "description": "Schema name",
    "title": "Schema Name",
    "type": "string",
}
_OBJECT_TYPE_PROP = {
    "default": "table",
    "description": "Object type: 'table', 'view', 'sequence', or 'extension'",
    "title": "Object Type",
    "type": "string",
}

# Tool schemas mirroring the postgres-mcp server's function definitions.
formatted_tools = [
    _tool("list_extensions", "List available and installed extensions"),
    _tool("list_schemas", "List all schemas in the database"),
    _tool(
        "list_objects",
        "List objects in a schema",
        properties={
            "schema_name": dict(_SCHEMA_NAME_PROP),
            "object_type": dict(_OBJECT_TYPE_PROP),
        },
        required=["schema_name"],
    ),
    _tool(
        "get_object_details",
        "Show detailed information about a database object",
        properties={
            "schema_name": dict(_SCHEMA_NAME_PROP),
            "object_name": {
                "description": "Object name",
                "title": "Object Name",
                "type": "string",
            },
            "object_type": dict(_OBJECT_TYPE_PROP),
        },
        required=["schema_name", "object_name"],
    ),
]
async def main():
    """Smoke-test the OpenAI-compatible provider with a tool-calling chat.

    Builds a ModelParameter from the module-level config, issues one chat
    request with the prepared messages and tool schemas, then prints the
    formatted response. The streaming path is gated by the local `stream`
    flag (False by default).
    """
    model_param = ModelParameter(**openai_model_parameter)
    # Streaming is disabled below, so the usage-reporting stream options
    # from the config dict are cleared for this run.
    model_param.stream_options = None
    cli = OpenAIProvider(model_param)
    stream = False  # flip to True to exercise the streaming code path
    response = await cli.chat(messages, formatted_tools, stream)
    if stream:
        try:
            # NOTE(review): _format_response_with_stream is a private helper
            # of OpenAIProvider — presumably yields printable chunks; confirm.
            async for chunk in cli._format_response_with_stream(response):
                # Print each streamed chunk as it arrives.
                print(chunk)
        except Exception as e:
            print("流处理失败:", str(e))
    else:
        cli._format_response(response)


if __name__ == "__main__":
    asyncio.run(main())