# pipeline/openai_wrapper.py
import asyncio
import base64
import os

import httpx
from openai import OpenAI, AsyncOpenAI

class SyncChat:
    """Synchronous wrapper around the OpenAI chat completions endpoint."""

    def __init__(self, model, api_key, base_url=None):
        # Allow routing requests through a custom OpenAI-compatible endpoint.
        if base_url is not None:
            self.sync_client = OpenAI(base_url=base_url, api_key=api_key)
        else:
            self.sync_client = OpenAI(api_key=api_key)
        self.model = model

    def get_response(self, message, temperature=0.2, max_tokens=1024):
        response = self.sync_client.chat.completions.create(
            model=self.model,
            messages=message,
            temperature=temperature,
            max_tokens=max_tokens,
        )
        return response.choices[0].message.content
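
# A minimal usage sketch for SyncChat (the model name and key below are
# placeholders, not values taken from this repository):
#
#   chat = SyncChat(model="gpt-4o", api_key="YOUR_API_KEY")
#   print(chat.get_response([{"role": "user", "content": "Hello!"}]))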


class AsyncChat:
    """Asynchronous wrapper that fans out several chat requests concurrently."""

    def __init__(self, model, api_key, base_url=None):
        if base_url is not None:
            self.async_client = AsyncOpenAI(base_url=base_url, api_key=api_key)
        else:
            self.async_client = AsyncOpenAI(api_key=api_key)
        self.model = model

    async def get_response(self, messages, temperature=0.2, max_tokens=1024):
        async def openai_reply(message):
            response = await self.async_client.chat.completions.create(
                model=self.model,
                messages=message,
                temperature=temperature,
                max_tokens=max_tokens,
            )
            return response.choices[0].message.content

        # Issue one request per conversation and await them all concurrently.
        tasks = [openai_reply(message) for message in messages]
        return await asyncio.gather(*tasks)
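
# A minimal usage sketch for AsyncChat (placeholders again; the batch is a
# list of conversations, and the coroutine is driven with asyncio.run):
#
#   chat = AsyncChat(model="gpt-4o", api_key="YOUR_API_KEY")
#   batch = [[{"role": "user", "content": q}] for q in ("Hi", "Bye")]
#   answers = asyncio.run(chat.get_response(batch))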


class VisionChat:
    """Wrapper for multimodal (image + text) chat completions."""

    def __init__(self, model, api_key, base_url=None):
        if base_url is not None:
            self.client = OpenAI(
                base_url=base_url,
                api_key=api_key,
                http_client=httpx.Client(
                    base_url=base_url,
                    follow_redirects=True,
                ),
            )
        else:
            self.client = OpenAI(api_key=api_key)
        self.model = model

    def get_response(self, message, temperature=0.2, max_tokens=1024):
        response = self.client.chat.completions.create(
            model=self.model,
            messages=message,
            # temperature=temperature,
            max_tokens=max_tokens)
        return response.choices[0].message.content
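

if __name__ == "__main__":
    # A minimal, hedged demo of VisionChat. The environment variable name,
    # model name, and image path below are placeholders rather than values
    # defined elsewhere in this repository.
    demo_key = os.environ.get("OPENAI_API_KEY", "")
    with open("example.jpg", "rb") as f:
        image_b64 = base64.b64encode(f.read()).decode("utf-8")
    vision = VisionChat(model="gpt-4o", api_key=demo_key)
    demo_message = [{
        "role": "user",
        "content": [
            {"type": "text", "text": "Describe this image."},
            {"type": "image_url",
             "image_url": {"url": f"data:image/jpeg;base64,{image_b64}"}},
        ],
    }]
    print(vision.get_response(demo_message))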