import streamlit as st
from streamlit_chat import message
# from utils import get_initial_message, get_chatgpt_response, update_chat
import os
from dotenv import load_dotenv
load_dotenv()
import openai
from openai import ChatCompletion
openai.api_key = os.getenv('OPENAI_KEY')
# keys are here: https://platform.openai.com/auth/callback?code=ReZ4izEw0DwkUKrHR-Opxr5AMMgo9SojxC9pNHQUcjD6M&state=OGZFNDJmLlJGNlIwOUxlakpXZkVFfjNxNy02ZlFtLWN4eUcuOXJobXouSQ%3D%3D#
# Read this, https://blog.futuresmart.ai/building-a-gpt-4-chatbot-using-chatgpt-api-and-streamlit-chat
# Define a function to chat with the model
def chat_with_model(prompts):
    model = "gpt-3.5-turbo"  # change this to the model you're using
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])
    response = openai.ChatCompletion.create(model=model, messages=conversation)
    return response['choices'][0]['message']['content']
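# A minimal usage sketch (not part of the app): the question text below is
# illustrative, and OPENAI_KEY must already be set for the call to succeed.
# Uncomment to smoke-test chat_with_model outside of Streamlit:
#
#     print(chat_with_model(["What is Streamlit used for?"]))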
load_dotenv('api_key.env')
def generate_response(prompt):
    completion = openai.Completion.create(
        engine='text-davinci-003',
        prompt=prompt,
        max_tokens=1024,
        n=1,
        stop=None,
        temperature=0.6,
    )
    message = completion.choices[0].text
    return message
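# Note: generate_response uses the legacy Completions endpoint and is not
# called by main() below; a hedged sketch of swapping it in would replace the
# chat_with_model(prompts) call with (prompt text illustrative):
#
#     response = generate_response(user_prompt)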
# Streamlit App
def main():
    st.title("Chat with AI")

    # Pre-defined prompts
    prompts = ["How's the weather?", 'Tell me a joke.', 'What is the meaning of life?']

    # User prompt input
    user_prompt = st.text_input("Your question:", '')
    if user_prompt:
        prompts.append(user_prompt)

    if st.button('Chat'):
        st.write('Chatting with GPT-3.5...')
        response = chat_with_model(prompts)
        st.write('Response:')
        st.write(response)
if __name__ == "__main__":
    main()
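# To launch the app locally (assuming streamlit is installed and the API key
# is available via .env or api_key.env):
#
#     streamlit run app.py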