# -*- coding: utf-8 -*-
"""Copy of Copy of Chatbot with custom knowledge base

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1VSXUmag_76fzebs16YhW_as4mdhHNdkx
"""
# pip install llama-index
# pip install langchain
# pip install gradio
# pip install pandas
# pip install openpyxl
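# Note: this script uses the legacy llama_index interface (GPTSimpleVectorIndex,
# LLMPredictor, PromptHelper, save_to_disk), which was removed in later releases.
# Pinning an older llama-index release (pre-0.5 is an assumption) and a langchain
# version from the same period is likely required for the imports below to work, e.g.:
# pip install "llama-index<0.5"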
import pandas as pd
from llama_index import SimpleDirectoryReader, GPTListIndex, readers, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
from langchain import OpenAI
import sys
import os
from IPython.display import Markdown, display
# import streamlit as st
import gradio as gr
# Load the Shegardi dataset (note: df is read here but not used elsewhere in this script)
df = pd.read_excel('Shegardi_dataset.xlsx', sheet_name='dataset')
# Set your OpenAI API key here; avoid committing a real key to source control.
os.environ['OPENAI_API_KEY'] = 'YOUR_OPENAI_API_KEY'
def construct_index(directory_path):
    # set maximum input size
    max_input_size = 4096
    # set number of output tokens
    num_outputs = 2000
    # set maximum chunk overlap
    max_chunk_overlap = 20
    # set chunk size limit
    chunk_size_limit = 600

    # define LLM
    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.5, model_name="text-davinci-003", max_tokens=num_outputs))
    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)

    documents = SimpleDirectoryReader(directory_path).load_data()

    index = GPTSimpleVectorIndex(
        documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper
    )

    index.save_to_disk('index.json')

    return index
# Build the index once before launching the app (ask_ai expects index.json to exist)
# construct_index("context_data/data")
def is_query_about_cashback(query):
    # Simple keyword check that decides whether a query should be routed to the cashback calculator
    cashback_keywords = ["cashback", "calculate", "calculation", "reward", "points"]
    return any(word in query.lower() for word in cashback_keywords)
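# The original script calls cashback_calculator() below but never defines it, and the
# actual Warba Bank cashback rules are not part of this file. The function below is a
# minimal placeholder sketch so the app runs end to end: the segment names, rates, and
# minimum-spend threshold are assumptions and should be replaced with the real rules.
def cashback_calculator(segment, total_spent, international_transactions, local_transactions):
    # Hypothetical per-segment cashback rates (assumed values, not from the original source)
    international_rates = {"classic": 0.01, "gold": 0.02, "platinum": 0.03}
    local_rates = {"classic": 0.005, "gold": 0.01, "platinum": 0.015}

    # Assumed minimum total spend before any cashback is earned
    if total_spent < 100:
        return 0.0

    rate_key = segment.strip().lower()
    intl_rate = international_rates.get(rate_key, 0.01)
    local_rate = local_rates.get(rate_key, 0.005)

    # Cashback is computed separately on international and local spend, then summed
    return international_transactions * intl_rate + local_transactions * local_rate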
def ask_ai(query, segment, total_spent, international_transactions, local_transactions):
    if is_query_about_cashback(query):
        # Use the structured inputs from the Gradio form (console input() does not work inside a web app)
        cashback = cashback_calculator(
            segment,
            float(total_spent or 0),
            float(international_transactions or 0),
            float(local_transactions or 0),
        )
        return f"The cashback amount for your card is: {cashback:.2f}"
    else:
        # Otherwise, answer the question from the custom knowledge base index
        index = GPTSimpleVectorIndex.load_from_disk('index.json')
        response = index.query(query, response_mode="compact")
        return response.response
iface = gr.Interface(
    fn=ask_ai,
    inputs=["text", "text", "number", "number", "number"],
    outputs="text",
    title="Shegardi - Warba Bank Chatbot",
    description=(
        "Shegardi is a helpful, precise, truthful, and very friendly assistant at Warba Bank in Kuwait. "
        "Shegardi only uses the information provided to him. Enter a question (and, for cashback "
        "questions, your card segment and spending amounts) to get an answer from Shegardi."
    ),
)
iface.launch()