Lite-gpt.4 / litegpt_cache.py
chillguyyyyyyyyyyer's picture
Create litegpt_cache.py
58db7ed verified
raw
history blame contribute delete
471 Bytes
import streamlit as st
# cache_data (not cache_resource) is the correct primitive here: the function
# returns a plain string, i.e. serializable data, not a shared resource handle.
@st.cache_data
def cache_gpt_response(prompt, model):
    """Return the chat completion for *prompt* from *model*, memoized by Streamlit.

    Identical (prompt, model) pairs are served from Streamlit's data cache
    instead of triggering another API call.

    Parameters:
        prompt: user message sent as a single-turn chat.
        model: OpenAI model identifier (e.g. "gpt-3.5-turbo").

    Returns:
        The assistant's reply text.

    Raises:
        RuntimeError: if no API key is configured.

    SECURITY: the original version embedded a live API key in source code.
    That key is compromised and must be revoked. The key is now read from the
    OPENAI_API_KEY environment variable, falling back to Streamlit secrets.
    """
    import os
    import openai

    # Never hard-code secrets; prefer the environment, then st.secrets.
    api_key = os.environ.get("OPENAI_API_KEY") or st.secrets.get("OPENAI_API_KEY")
    if not api_key:
        raise RuntimeError(
            "OPENAI_API_KEY is not set (environment or Streamlit secrets)."
        )
    openai.api_key = api_key

    response = openai.ChatCompletion.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
    )
    return response["choices"][0]["message"]["content"]