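"""Minimal OpenAI-compatible /chat/completions API built on g4f and Flask."""
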
import time
import json
import random

from g4f import ChatCompletion
from flask import Flask, request
from flask_cors import CORS

app = Flask(__name__)
CORS(app)  # allow cross-origin requests from browser-based clients


@app.route("/chat/completions", methods=['POST'])
def chat_completions():
    # Pull the OpenAI-style parameters out of the JSON request body.
    streaming = request.json.get('stream', False)
    model = request.json.get('model', 'gpt-3.5-turbo')
    messages = request.json.get('messages')

    response = ChatCompletion.create(model=model, stream=streaming,
                                     messages=messages)

    if not streaming:
        # g4f occasionally returns the provider's error text instead of raising,
        # so retry until the response no longer contains that marker.
        while 'curl_cffi.requests.errors.RequestsError' in response:
            response = ChatCompletion.create(model=model, stream=streaming,
                                             messages=messages)

        completion_timestamp = int(time.time())
        completion_id = ''.join(random.choices(
            'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', k=28))

        # Single, non-streaming response in the OpenAI chat.completion format.
        return {
            'id': 'chatcmpl-%s' % completion_id,
            'object': 'chat.completion',
            'created': completion_timestamp,
            'model': model,
            'usage': {
                # Token counts are not available from g4f, so they are left unset.
                'prompt_tokens': None,
                'completion_tokens': None,
                'total_tokens': None
            },
            'choices': [{
                'message': {
                    'role': 'assistant',
                    'content': response
                },
                'finish_reason': 'stop',
                'index': 0
            }]
        }

    def stream():
        # Emit Server-Sent Events, one chunk per generated token, in the
        # OpenAI chat.completion.chunk format.
        for token in response:
            completion_timestamp = int(time.time())
            completion_id = ''.join(random.choices(
                'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', k=28))

            completion_data = {
                'id': f'chatcmpl-{completion_id}',
                'object': 'chat.completion.chunk',
                'created': completion_timestamp,
                'model': 'gpt-3.5-turbo-0301',
                'choices': [
                    {
                        'delta': {
                            'content': token
                        },
                        'index': 0,
                        'finish_reason': None
                    }
                ]
            }

            yield 'data: %s\n\n' % json.dumps(completion_data, separators=(',', ':'))
            time.sleep(0.1)

    return app.response_class(stream(), mimetype='text/event-stream')


if __name__ == '__main__':
    config = {
        'host': '0.0.0.0',
        'port': 1337,
        'debug': True
    }
    app.run(**config)
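
# A minimal usage sketch, assuming the server above is running locally on
# port 1337 and that the installed g4f version accepts the model name below:
#
#   curl http://127.0.0.1:1337/chat/completions \
#        -H "Content-Type: application/json" \
#        -d '{"model": "gpt-3.5-turbo", "stream": false,
#            "messages": [{"role": "user", "content": "Hello"}]}'
#
# With "stream": true the endpoint returns Server-Sent Events
# ("data: {...}\n\n" chunks) instead of a single JSON object.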