# Hugging Face Space page header captured by the scrape — not code.
# Space status: Paused.
import os

# Sanity-check that the secrets this Space needs are configured.
# SECURITY: the original code printed the raw values of both secrets
# (os.environ['OPENAI_API_KEY'] and os.environ['HF_Key']), which leaks
# credentials into the Space build/run logs. Report presence only —
# never echo the secret value itself.
print('OPENAI_API_KEY' in os.environ)
print('HF_Key' in os.environ)
import openai | |
import json | |
# from llama_index import GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext, QuestionAnswerPrompt | |
# from langchain import OpenAI | |
# # handling data on space | |
# from huggingface_hub import HfFileSystem | |
# fs = HfFileSystem(token=HF_Key) | |
# text_list = fs.ls("datasets/GoChat/Gochat247_Data/Data", detail=False) | |
# data = fs.read_text(text_list[0]) | |
# from llama_index import Document | |
# doc = Document(data) | |
# docs = [] | |
# docs.append(doc) | |
# # define LLM | |
# llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="text-davinci-003")) | |
# # define prompt helper | |
# # set maximum input size | |
# max_input_size = 4096 | |
# # set number of output tokens | |
# num_output = 256 | |
# # set maximum chunk overlap | |
# max_chunk_overlap = 20 | |
# prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap) | |
# service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper) | |
# index = GPTSimpleVectorIndex.from_documents(docs) | |
# ## Define Chat BOT Class to generate Response , handle chat history, | |
# class Chatbot: | |
# def __init__(self, index): | |
# self.index = index | |
# openai.api_key = OPENAI_API_KEY | |
# self.chat_history = [] | |
#         QA_PROMPT_TMPL = ( | |
#             "Answer without the word 'Answer:'. " | |
#             "You are in a conversation with a visitor to GoChat247's web site.\n" | |
#             "The user entered this conversation to learn more about GoChat247. " | |
#             "You will act as GoChat247's virtual AI bot; be friendly and welcoming.\n" | |
#             "The context of the conversation should always be limited to learning more about GoChat247 as a company providing Business Process Outsourcing and AI customer-experience solutions.\n" | |
#             "Below is the previous chat with the user:\n" | |
#             "---------------------\n" | |
#             "{context_str}" | |
#             "\n---------------------\n" | |
#             "Given the context information and the chat history, and not prior knowledge,\n" | |
#             "answer the question: {query_str}\n" | |
#             "It is OK if you do not know the answer; ask the user for more information.\n" | |
#             "Please provide a brief and concise but friendly response.") | |
# self.QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL) | |
# def generate_response(self, user_input): | |
# prompt = "\n".join([f"{message['role']}: {message['content']}" for message in self.chat_history[-5:]]) | |
# prompt += f"\nUser: {user_input}" | |
# self.QA_PROMPT.context_str = prompt | |
# response = index.query(user_input, text_qa_template=self.QA_PROMPT) | |
# message = {"role": "assistant", "content": response.response} | |
# self.chat_history.append({"role": "user", "content": user_input}) | |
# self.chat_history.append(message) | |
# return message | |
# def load_chat_history(self, filename): | |
# try: | |
# with open(filename, 'r') as f: | |
# self.chat_history = json.load(f) | |
# except FileNotFoundError: | |
# pass | |
# def save_chat_history(self, filename): | |
# with open(filename, 'w') as f: | |
# json.dump(self.chat_history, f) | |
# ## Instantiate the chatbot with the vector index | |
# bot = Chatbot(index=index) | |
# import webbrowser | |
# import gradio as gr | |
# import time | |
# with gr.Blocks(theme='SebastianBravo/simci_css') as demo: | |
# with gr.Column(scale=4): | |
# title = 'GoChat247 AI BOT' | |
# chatbot = gr.Chatbot(label='GoChat247 AI BOT') | |
# msg = gr.Textbox() | |
# clear = gr.Button("Clear") | |
# def user(user_message, history): | |
# return "", history + [[user_message, None]] | |
# def agent(history): | |
# last_user_message = history[-1][0] | |
# agent_message = bot.generate_response(last_user_message) | |
# history[-1][1] = agent_message ["content"] | |
# time.sleep(1) | |
# return history | |
# msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(agent, chatbot, chatbot) | |
# clear.click(lambda: None, None, chatbot, queue=False) | |
# print(webbrowser.get()) | |
# # handling dark_theme | |
# # def apply_dark_theme(url): | |
# # if not url.endswith('?__theme=dark'): | |
# # webbrowser.open_new(url + '?__theme=dark') | |
# # gradioURL = 'http://localhost:7860/' | |
# # apply_dark_theme(gradioURL) | |
# if __name__ == "__main__": | |
# demo.launch() |