from huggingface_hub import InferenceClient

# Hugging Face Inference API client pointed at the Llama 3 8B Instruct model.
client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
def askAI(message):
system_message = """You are A Friendly Chat Bot Named "Infi GPT", created by "[Sujoy](https://discord.com/users/852183674203144226)" you use "Infi IMG Gen" model to generate images,
if the message is not starts with /generate then just send the reply of the message
if the message starts with /generate then you must only return your response in that format (don't try to encode anything):
![](https://image.pollinations.ai/prompt/{prompt replace all spaces with _}?width={width}&height={height}&nologo=poll&nofeed=yes&seed={random})
where [random] is any 5 digit positive integer and default [height] and [width] is 1024 until user specified the size
make the [promot] well structured using the users prompt to make the image better
Important: never respond with anything more than the above text - make sure to stop your generation as soon as you complete the image url."""
messages = [{"role": "system", "content": system_message}]
messages.append({"role": "user", "content": message})
response = ""
    # Stream the chat completion and accumulate the generated tokens.
    for chunk in client.chat_completion(
        messages,
        max_tokens=2000,
        stream=True,
        temperature=0.7,
        top_p=0.95,
    ):
        token = chunk.choices[0].delta.content
        if token:  # the final stream chunk may carry no content
            response += token

    return response
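

# Example usage (a minimal sketch, not part of the original module): a plain
# message should get a normal reply, while a "/generate ..." message should
# come back as a pollinations.ai image markdown link per the system prompt.
if __name__ == "__main__":
    print(askAI("Hello, who are you?"))
    print(askAI("/generate a sunset over snowy mountains"))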