import json
import http.client

from openai import AzureOpenAI


class ContentFormatter:
    """Builds the JSON payload for a chat-completions request."""

    @staticmethod
    def chat_completions(text, settings_params):
        # Wrap the user text in a minimal two-message conversation and
        # merge in any extra request settings (temperature, max_tokens, ...).
        message = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": text}
        ]
        data = {"messages": message, **settings_params}
        return json.dumps(data)
class AzureAgent:
    """Calls an Azure-hosted chat-completions endpoint over raw HTTPS."""

    def __init__(self, api_key, azure_uri, deployment_name):
        self.azure_uri = azure_uri
        self.headers = {
            'Authorization': f"Bearer {api_key}",
            'Content-Type': 'application/json'
        }
        self.deployment_name = deployment_name
        self.chat_formatter = ContentFormatter

    def invoke(self, text, **kwargs):
        # Serialize the request body, POST it to the endpoint, and return
        # the assistant's reply text from the first choice.
        body = self.chat_formatter.chat_completions(text, {**kwargs})
        conn = http.client.HTTPSConnection(self.azure_uri)
        conn.request("POST", '/v1/chat/completions', body=body, headers=self.headers)
        response = conn.getresponse()
        data = response.read()
        conn.close()
        decoded_data = data.decode("utf-8")
        parsed_data = json.loads(decoded_data)
        content = parsed_data["choices"][0]["message"]["content"]
        return content
class GPTAgent:
    """Calls an Azure OpenAI deployment through the official SDK client."""

    def __init__(self, api_key, azure_endpoint, deployment_name, api_version):
        self.client = AzureOpenAI(
            api_key=api_key,
            api_version=api_version,
            azure_endpoint=azure_endpoint
        )
        self.deployment_name = deployment_name

    def invoke(self, text, **kwargs):
        # Send the prompt to the named deployment; extra keyword arguments
        # are passed straight through to the chat-completions call.
        response = self.client.chat.completions.create(
            model=self.deployment_name,
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": text}
            ],
            **kwargs
        )
        return response.choices[0].message.content
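

# Minimal usage sketch (not part of the original file). The endpoint, key,
# deployment name, and API version below are hypothetical placeholders;
# replace them with the values from your own Azure OpenAI resource.
if __name__ == "__main__":
    gpt = GPTAgent(
        api_key="YOUR_AZURE_OPENAI_KEY",
        azure_endpoint="https://your-resource.openai.azure.com",
        deployment_name="your-deployment",
        api_version="2024-02-01",
    )
    print(gpt.invoke("Say hello in one sentence.", temperature=0.7, max_tokens=64))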