import os

import gradio as gr
from embedchain import App

# Hugging Face and Pinecone credentials are expected in the environment
# (e.g. HUGGINGFACE_ACCESS_TOKEN and PINECONE_API_KEY, depending on the
# embedchain version in use).

# Initialize the Embedchain app with an open-source configuration:
# Mistral-7B-Instruct via Hugging Face for generation, an MPNet
# sentence-transformer for embeddings, and Pinecone as the vector store.
config = {
    'llm': {
        'provider': 'huggingface',
        'config': {
            'model': 'mistralai/Mistral-7B-Instruct-v0.2',
            'top_p': 0.5
        }
    },
    'embedder': {
        'provider': 'huggingface',
        'config': {
            'model': 'sentence-transformers/all-mpnet-base-v2'
        },
    },
    'vectordb': {
        'provider': 'pinecone',
        'config': {
            'metric': 'cosine',
            'vector_dimension': 768,
            'index_name': 'vox'
        }
    }
}

app = App.from_config(config=config)

# # Add data to the app
# app.add("https://voxiitk.com/scrapping-of-galaxy24/")
# app.add("https://voxiitk.com/administration-responds-to-students-senates-demands/")
# app.add("https://voxiitk.com/yeh-hall-ab-13-na-raha/")
# app.add("https://voxiitk.com/girls-protest-at-directors-residence/")
# app.add("https://www.iitk.ac.in/doaa/data/UG-Manual.pdf")
# app.add("student_data.student_data.json")


def query(message, history):
    # The raw model output echoes the prompt; return only the text
    # following the "Answer:" marker, or the full response if it is absent.
    answer = app.query(message)
    start_index = answer.find("Answer:")
    if start_index == -1:
        return answer
    return answer[start_index + len("Answer:"):]


demo = gr.ChatInterface(query)
demo.launch()