import gradio as gr
# from sentence_transformers import SentenceTransformer, util
#
# model_name = 'nq-distilbert-base-v1'       # reference name of the checkpoint
# bi_encoder = SentenceTransformer("./")     # load the model files from the local directory
# top_k = 5
# sentences = [
#     "a happy person is a person who can do what he wants with his money",
#     "That is a happy dog who barks a lot",
#     "Today is a sunny day so that a happy person can walk on the street",
# ]
# # vector embeddings created from the dataset
# corpus_embeddings = bi_encoder.encode(sentences, convert_to_tensor=True, show_progress_bar=True)
#
# def search(query):
#     # Encode the query with the bi-encoder and find potentially relevant passages
#     question_embedding = bi_encoder.encode(query)
#     hits = util.semantic_search(question_embedding, corpus_embeddings, top_k=top_k)
#     hits = hits[0]  # Get the hits for the first (and only) query
#
#     # Print the top-k hits
#     print("Input question:", query)
#     print("Results")
#     for hit in hits:
#         print("\t{:.3f}\t{}".format(hit['score'], sentences[hit['corpus_id']]))
#     return hits
#
# def greet(name):
#     hits = search(query=name)
#     # Map each retrieved sentence to its similarity score
#     results = dict()
#     for hit in hits:
#         score = hit['score']
#         sentence = sentences[hit['corpus_id']]
#         results[sentence] = score
#     return results
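# Illustrative note (not part of the original code): util.semantic_search returns one
# list of hits per query, and each hit is a dict with 'corpus_id' and 'score' keys,
# e.g. hits[0] == {'corpus_id': <index into sentences>, 'score': <similarity score>}.
# greet() above therefore returns a {sentence: score} mapping for the top_k matches.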
import dill
import ast

def greet1(data):
    # 'data' arrives as a single string: the repr() of a dict holding the payload
    print(data)
    payload = ast.literal_eval(data)  # parse the dict literal (safer than eval)
    pdf_field = payload.get('pdf')    # repr() of dill-serialized bytes
    print(pdf_field)
    print(type(pdf_field))
    # Turn the string back into bytes, then let dill rebuild the file-like object
    pdf_file = dill.loads(ast.literal_eval(pdf_field))
    print(type(pdf_file))
    print(pdf_file.read(), "this is the file data")
    return pdf_field
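# --- Hypothetical client-side sketch (not in the original app) --------------------
# greet1() expects the text input to be the repr() of a dict whose 'pdf' value is
# the repr() of dill-serialized bytes. Assuming a local file named "sample.pdf" and
# that dill is configured to carry the file contents with the pickle
# (dill.settings['fmode'] = dill.FILE_FMODE), the payload could be built like this:
#
#   import dill
#   dill.settings['fmode'] = dill.FILE_FMODE
#   with open("sample.pdf", "rb") as f:
#       payload = repr({"pdf": repr(dill.dumps(f))})
#   # 'payload' is the single string sent to the "testing" endpoint below.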
# Gradio UI: a single textbox in, a single textbox out
iface = gr.Blocks()
with iface:
    name = gr.Textbox(label="Name")
    output = gr.Textbox(label="Output Box")
    # greet_btn = gr.Button("Greet")
    # greet_btn.click(fn=greet, inputs=name, outputs=output, api_name="greet")
    greet1_btn = gr.Button("Greet1")
    greet1_btn.click(fn=greet1, inputs=name, outputs=output, api_name="testing")

iface.launch()
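# --- Hypothetical usage from a separate process (not in the original app) ---------
# Once the app is running, the "testing" endpoint registered above could be called
# with the gradio_client package; the URL and the payload variable are placeholders,
# with 'payload' built as sketched next to greet1() above:
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860/")
#   result = client.predict(payload, api_name="/testing")
#   print(result)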