import os
from pathlib import Path

import gradio as gr
from llama_index import ServiceContext, StorageContext, load_index_from_storage
from llama_index.embeddings import HuggingFaceEmbedding, VoyageEmbedding
from llama_index.llms import Anyscale
from llama_index.query_engine import RetrieverQueryEngine
from llama_index.retrievers import RecursiveRetriever

# Inference model, served through Anyscale Endpoints.
llm = Anyscale(model="mistralai/Mistral-7B-Instruct-v0.1", api_key=os.getenv("ANYSCALE_API_KEY"))
# Alternative LLM:
# llm = Anyscale(model="HuggingFaceH4/zephyr-7b-beta", api_key=os.getenv("ANYSCALE_API_KEY"))

# Embedding model used to embed the query; it must match the model used to build the index.
# Alternative: query_embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-base-en-v1.5")
embed_model = VoyageEmbedding(model_name="voyage-01", voyage_api_key=os.getenv("VOYAGE_API_KEY"))

service_context = ServiceContext.from_defaults(llm=llm, embed_model=embed_model)
storage_context = StorageContext.from_defaults(persist_dir=Path("./custom_index"))

# Load the vector store index that was persisted earlier.
index = load_index_from_storage(storage_context=storage_context, service_context=service_context)

# Build the query engine: a recursive retriever wrapping the index's top-k retriever.
index_retriever = index.as_retriever(similarity_top_k=4)
recursive_retriever = RecursiveRetriever("vector", retriever_dict={"vector": index_retriever})
query_engine = RetrieverQueryEngine.from_args(recursive_retriever, service_context=service_context)

def authenticate(username, password):
    # Hard-coded credentials for the Gradio login screen.
    return username == "Gribouille" and password == "A jamais les premiers"

def predict(query):
    # Run the query through the retrieval pipeline and return the answer as text.
    return str(query_engine.query(query))

iface = gr.Interface(fn=predict, inputs=["text"], outputs="text")
iface.launch(auth=authenticate, share=True)