# -*- coding: utf-8 -*-
"""Copy of Copy of Chatbot with custom knowledge base
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1VSXUmag_76fzebs16YhW_as4mdhHNdkx
"""
#pip install llama-index
#pip install langchain
#pip install gradio
#pip install pandas
#pip install openpyxl
import os
import sys

import pandas as pd
import gradio as gr
from IPython.display import Markdown, display
from langchain import OpenAI
from llama_index import SimpleDirectoryReader, GPTListIndex, readers, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
# Load the Q&A dataset from Excel. Note: this DataFrame is not referenced again
# in the rest of this script.
df = pd.read_excel('Shegardi_dataset.xlsx', sheet_name='dataset')
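# The sketch below (an assumption, not part of the original script) shows one
# way the DataFrame could feed the index: dump it to a plain-text file inside
# the directory that construct_index() later reads. The directory name, file
# name, and to_string() formatting are illustrative choices only.
# os.makedirs('context_data/data', exist_ok=True)
# with open('context_data/data/shegardi_dataset.txt', 'w', encoding='utf-8') as f:
#     f.write(df.to_string(index=False))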
# Set the OpenAI API key (use your own key; avoid committing real keys to source control).
os.environ['OPENAI_API_KEY'] = 'YOUR_OPENAI_API_KEY'
def construct_index(directory_path):
    # set maximum input size
    max_input_size = 4096
    # set number of output tokens
    num_outputs = 2000
    # set maximum chunk overlap
    max_chunk_overlap = 20
    # set chunk size limit
    chunk_size_limit = 600

    # define LLM
    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.5, model_name="text-davinci-003", max_tokens=num_outputs))
    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)

    # load documents from the given directory and build the vector index
    documents = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex(
        documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper
    )

    # Fix for the error message
    index.registry.add_type_to_struct(1, {"answer": str, "question": str})

    # persist the index so ask_ai() can reload it later
    index.save_to_disk('index.json')
    return index
# Build the index once before launching the app (expects source documents in the given directory):
# construct_index("context_data/data")
def ask_ai(query):
    # reload the persisted index and run the query against it
    index = GPTSimpleVectorIndex.load_from_disk('index.json')
    response = index.query(query, response_mode="compact")
    return response.response
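# Optional sanity check (assumption: index.json has already been built and saved
# to disk); the question below is only an illustrative example.
# print(ask_ai("What services does Warba Bank offer?"))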
iface = gr.Interface(
    fn=ask_ai,
    inputs="text",
    outputs="text",
    title="The following is a conversation with a human called Shegardi. Shegardi is helpful, precise, truthful, and very friendly. Also, Shegardi is an employee of Warba Bank, located in Kuwait. Shegardi will only use the information provided to him.",
    description="Enter a question and get an answer from Shegardi.",
)
iface.launch()