#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time    : 2023/5/1 12:10
@Author  : alexanderwu
@File    : conftest.py
"""
import asyncio
import re
from unittest.mock import Mock

import pytest

from metagpt.config import Config
from metagpt.logs import logger
from metagpt.provider.openai_api import OpenAIGPTAPI as GPTAPI

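
# Context groups the objects shared by the LLM tests; for now it only holds a
# real GPTAPI client, which the llm_api fixture hands out once per test package.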
class Context:
    def __init__(self):
        self._llm_ui = None
        self._llm_api = GPTAPI()

    @property
    def llm_api(self):
        return self._llm_api


@pytest.fixture(scope="package")
def llm_api():
    logger.info("Setting up the test")
    _context = Context()

    yield _context.llm_api

    logger.info("Tearing down the test")


@pytest.fixture(scope="function")
def mock_llm():
    # Create a mock LLM for testing
    return Mock()

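
# Minimal HTTP forward proxy bound to 127.0.0.1 on an ephemeral port. It parses
# the request line for the target host/port, answers CONNECT with a 200 and then
# tunnels the connection, otherwise relays the request as-is. Each request line
# is printed ("Proxy: ...") so tests can confirm traffic went through the proxy
# via capfd. The fixture returns the proxy URL, e.g. "http://127.0.0.1:54321".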
@pytest.fixture(scope="session")
def proxy():
pattern = re.compile(
rb"(?P<method>[a-zA-Z]+) (?P<uri>(\w+://)?(?P<host>[^\s\'\"<>\[\]{}|/:]+)(:(?P<port>\d+))?[^\s\'\"<>\[\]{}|]*) "
)
async def pipe(reader, writer):
while not reader.at_eof():
writer.write(await reader.read(2048))
writer.close()
async def handle_client(reader, writer):
data = await reader.readuntil(b"\r\n\r\n")
print(f"Proxy: {data}") # checking with capfd fixture
infos = pattern.match(data)
host, port = infos.group("host"), infos.group("port")
port = int(port) if port else 80
remote_reader, remote_writer = await asyncio.open_connection(host, port)
if data.startswith(b"CONNECT"):
writer.write(b"HTTP/1.1 200 Connection Established\r\n\r\n")
else:
remote_writer.write(data)
await asyncio.gather(pipe(reader, remote_writer), pipe(remote_reader, writer))
server = asyncio.get_event_loop().run_until_complete(asyncio.start_server(handle_client, "127.0.0.1", 0))
return "http://{}:{}".format(*server.sockets[0].getsockname())
@pytest.fixture(scope="session", autouse=True)
def init_config():
Config()