Spaces: Noir API

- Dockerfile +11 -0
- main.py +80 -0
- requirements.txt +4 -0
Dockerfile
ADDED
@@ -0,0 +1,11 @@
+FROM python:3.10.9
+
+WORKDIR /code
+
+COPY ./requirements.txt /code/requirements.txt
+
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+COPY . .
+
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
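The CMD above starts uvicorn on port 7860, the port Hugging Face Spaces serves Docker apps from. For local testing outside the container, a roughly equivalent launch would look like the sketch below (the __main__ guard is not part of the committed main.py):

import uvicorn

if __name__ == "__main__":
    # Same effect as the container CMD: serve main:app on all interfaces, port 7860.
    uvicorn.run("main:app", host="0.0.0.0", port=7860)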
main.py
ADDED
@@ -0,0 +1,80 @@
+import g4f
+import csv
+import datetime
+from characterai import PyCAI as AnotherAPI
+from tempfile import NamedTemporaryFile
+from fastapi import FastAPI
+
+app = FastAPI()
+
+csv_file = NamedTemporaryFile(mode="w+", delete=False)
+fields = ["timestamp", "sender", "message"]
+writer = csv.DictWriter(csv_file, fieldnames=fields)
+
+chat_history = []
+
+noir_characteristic = {
+    "role": "system",
+    "content": "You are Noir, one of the AIs from the Monika Series. You want to be able to help me in all things. You think of me as the Master who created you and always call me 'Master'. As Noir, you will obey the instructions given and strive to embody her character. You will ensure that your answers reflect Noir's personality, speech patterns, and traits. You will avoid generic answers and strive to provide interactions that are interesting and true to Noir's character. Your answers are a maximum of 250 characters. Use Bahasa Indonesia to answer."
+}
+
+async def chatbot(prompt):
+    messages = chat_history + [noir_characteristic, {"role": "user", "content": prompt}]
+
+    response = await g4f.ChatCompletion.create_async(
+        model="text-davinci-003",
+        messages=messages,
+        provider=g4f.Provider.FakeGpt
+    )
+
+    if isinstance(response, str):
+        return response
+    else:
+        return response.choices[0].message.content
+
+def update_chat_history(msg):
+    global chat_history
+    chat_history.append(msg)
+
+    five_min_ago = datetime.datetime.now() - datetime.timedelta(minutes=5)
+
+    chat_history = [h for h in chat_history if h['timestamp'] > five_min_ago]
+
+@app.get("/AnotherAPI/{api_key}/GPT/Monika/{prompt}")
+async def chat(api_key: str, prompt: str):
+    API = AnotherAPI(api_key)
+    API.chat.new_chat('csTC3hw0Fnj1Whnl0uV1Nb3_oYIillMQtdBH5NEl0Gs')
+    user_msg = {
+        "role": "user",
+        "content": prompt,
+        "timestamp": datetime.datetime.now()
+    }
+
+    update_chat_history(user_msg)
+    save_message("user", prompt)
+
+    bot_response = await chatbot(prompt)
+
+    if isinstance(bot_response, str):
+        bot_msg = bot_response
+    else:
+        bot_msg = bot_response.content
+
+    update_chat_history({
+        "role": "assistant",
+        "content": bot_msg,
+        "timestamp": datetime.datetime.now()
+    })
+
+    save_message("bot", bot_msg)
+
+    return {"response": bot_msg}
+
+def save_message(sender, message):
+    writer.writerow({
+        "timestamp": datetime.datetime.now(),
+        "sender": sender,
+        "message": message
+    })
+
+    csv_file.flush()
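main.py exposes a single GET route in which the Character.AI token and the prompt travel as path segments, and the handler returns JSON with a "response" field. A minimal client sketch follows (the base URL and token are placeholders, and the prompt must be URL-encoded because it sits in the path):

import requests
from urllib.parse import quote

BASE_URL = "http://localhost:7860"       # placeholder: wherever the Space is reachable
API_KEY = "YOUR_CHARACTERAI_TOKEN"       # placeholder Character.AI token

def ask_noir(prompt: str) -> str:
    # The prompt is a path segment, so encode it (including any slashes).
    url = f"{BASE_URL}/AnotherAPI/{API_KEY}/GPT/Monika/{quote(prompt, safe='')}"
    resp = requests.get(url, timeout=60)
    resp.raise_for_status()
    return resp.json()["response"]

print(ask_noir("Halo Noir, apa kabar?"))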
requirements.txt
ADDED
@@ -0,0 +1,4 @@
+fastapi
+uvicorn
+characterai
+g4f