# NOTE: removed non-Python scrape artifacts (file-size banner, commit hashes,
# line-number gutter) that made this file syntactically invalid.
from llama_index.llms.mistralai import MistralAI
from llama_index.embeddings.mistralai import MistralAIEmbedding
from llama_index.core.settings import Settings
from llama_index.core import SimpleDirectoryReader, VectorStoreIndex
import gradio as gr
from gradio_pdf import PDF
import os
# API key: prefer the MISTRAL_API_KEY environment variable. The hard-coded
# fallback is kept only for backward compatibility — a secret committed to
# source control is compromised and SHOULD be revoked and rotated.
api_key = os.environ.get('MISTRAL_API_KEY', 'Of59Qz8Enr4fVj11XoKLRkNHENULLpLt')

# Mistral chat models offered in the UI dropdown.
my_list = ['open-mistral-7b', 'open-mixtral-8x7b', 'mistral-small-latest',
           'mistral-medium-latest', 'mistral-large-latest']
mdel = my_list[3]  # default model: 'mistral-medium-latest'

# Global LlamaIndex configuration: `llm` answers queries, `embed_model`
# builds the vector index. Both are read via Settings by the query engine.
llm = MistralAI(api_key=api_key, model=mdel)
embed_model = MistralAIEmbedding(model_name='mistral-embed', api_key=api_key)
Settings.llm = llm
Settings.embed_model = embed_model
def qa(model: str, question: str, doc: str, mdel=None) -> str:
    """Answer a question about a PDF document with the selected Mistral model.

    Args:
        model: Model name chosen in the UI dropdown.
        question: The user's question (the prompt forces a French answer).
        doc: Filesystem path to the PDF to index and query.
        mdel: Previously selected model name, if any. Defaults to None so the
            function works when Gradio supplies only its three UI inputs
            (the original required 4 arguments and raised TypeError on every
            call).

    Returns:
        The query engine's answer, converted to ``str``.
    """
    if mdel != model:
        mdel = model
        # Bug fix: the original assigned a *local* ``llm`` that was never
        # used, so switching models in the UI had no effect. The query engine
        # reads Settings.llm, so rebind it here.
        Settings.llm = MistralAI(api_key=api_key, model=mdel)
    # Load and index the selected PDF on every call (no caching).
    documents = SimpleDirectoryReader(input_files=[doc]).load_data()
    index = VectorStoreIndex.from_documents(documents)
    engine = index.as_query_engine()
    # Runtime prompt prefix forcing French answers — kept verbatim.
    question = "tu n'utile pas la langue anglaises, tu reponds en francais, " + question
    response = engine.query(question)
    return str(response)
# Gradio UI: model dropdown + question box + PDF upload -> answer textbox.
demo = gr.Interface(
    qa,
    [gr.Dropdown(choices=my_list, label="model", value=mdel),
     gr.Textbox(label="Question"),
     PDF(label="Document")],
    gr.Textbox(),
)

if __name__ == "__main__":
    # Auth credentials come from the environment when set; the literal
    # "username"/"password" pair is a development-only fallback and must not
    # ship to production (hard-coded credentials are a security defect).
    demo.launch(auth=(os.environ.get("GRADIO_AUTH_USER", "username"),
                      os.environ.get("GRADIO_AUTH_PASS", "password")))
# NOTE: removed trailing scrape artifact ("|").