import gradio as gr
from huggingface_hub import InferenceClient
import requests
from bs4 import BeautifulSoup
from bs4.element import Comment
def tag_visible(element):
    """Return False for text nodes that are not rendered on the page."""
    if element.parent.name in ['style', 'script', 'head', 'title', 'meta', '[document]']:
        return False
    if isinstance(element, Comment):
        return False
    return True


def get_text_from_url(url):
    """Fetch a page and return only its visible text, one fragment per line."""
    response = requests.get(url)
    soup = BeautifulSoup(response.text, 'html.parser')
    texts = soup.find_all(text=True)
    visible_texts = filter(tag_visible, texts)
    return u"\n".join(t.strip() for t in visible_texts)
text_list = []
homepage_url = "https://sites.google.com/view/abhilashnandy/home/"
extensions = ["", "pmrf-profile-page"]
for ext in extensions:
    url_text = get_text_from_url(homepage_url + ext)
    text_list.append(url_text)
# Repeat for sub-links if necessary
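# A minimal sketch for the note above about repeating for sub-links (an assumption,
# not part of the original app): collect same-site links from the homepage and scrape
# each one with get_text_from_url. Not called by default.
def get_subpage_texts(base_url=homepage_url):
    soup = BeautifulSoup(requests.get(base_url, timeout=10).text, 'html.parser')
    sub_texts = []
    for anchor in soup.find_all("a", href=True):
        href = anchor["href"]
        if href.startswith(base_url) and href != base_url:
            sub_texts.append(get_text_from_url(href))
    return sub_texts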
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("stabilityai/stablelm-2-1_6b-chat")#("stabilityai/stablelm-2-1_6b-chat")#("TheBloke/TinyLlama-1.1B-Chat-v1.0-GPTQ")#("TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF")#("QuantFactory/Meta-Llama-3-8B-Instruct-GGUF")#("HuggingFaceH4/zephyr-7b-beta")
SYSTEM_MESSAGE = "You are a QA chatbot to answer queries (in less than 30 words) on my homepage that has the following information -\n\n" + "\n\n".join(text_list) + "\n\n"
def respond(
    message,
    history: list[tuple[str, str]],
    system_message=SYSTEM_MESSAGE,
    max_tokens=140,
    temperature=0.7,
    top_p=0.95,
):
    messages = [{"role": "system", "content": system_message}]
    for val in history:
        if len(val) >= 1:
            messages.append({"role": "user", "content": "Question: " + val[0]})
        if len(val) >= 2:
            messages.append({"role": "assistant", "content": "Answer: " + val[1]})
    messages.append({"role": "user", "content": message})
    try:
        response = client.chat_completion(
            messages,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            # stream=True,  # Disable streaming for debugging
        )
        return response.choices[0].message["content"]
    except Exception as e:
        print(f"An error occurred: {e}")
        return str(e)  # "An error occurred while processing the response."
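# A hedged sketch of the streaming variant hinted at by the commented-out stream=True
# above (assumption: the hosted model supports streamed chat completions). Gradio's
# ChatInterface accepts generator functions, so yielding the growing partial text
# streams the reply token by token. Not wired into the UI below.
def respond_stream(
    message,
    history: list[tuple[str, str]],
    system_message=SYSTEM_MESSAGE,
    max_tokens=140,
    temperature=0.7,
    top_p=0.95,
):
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": "Question: " + user_msg})
        messages.append({"role": "assistant", "content": "Answer: " + bot_msg})
    messages.append({"role": "user", "content": message})
    partial = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        stream=True,
    ):
        partial += chunk.choices[0].delta.content or ""
        yield partial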
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
# initial_message = [("user", "Yo who dis Abhilash?")]
markdown_note = "## Ask Anything About Me! (Might show a tad bit of hallucination!)"
demo = gr.Blocks()
with demo:
gr.Markdown(markdown_note)
gr.ChatInterface(
respond,
examples = ["Yo who dis Abhilash?", "What is Abhilash's most recent publication?"],
# message=initial_message,
additional_inputs=[
# gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
# gr.Slider(minimum=1, maximum=8192, value=512, step=1, label="Max new tokens"),
# gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
# gr.Slider(
# minimum=0.1,
# maximum=1.0,
# value=0.95,
# step=0.05,
# label="Top-p (nucleus sampling)",
# ),
],
# value=initial_message
)
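    # Design note: if the additional_inputs above are re-enabled, Gradio's ChatInterface
    # passes their values to respond positionally after (message, history), i.e. as
    # system_message, max_tokens, temperature, and top_p in that order.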
if __name__ == "__main__":
demo.launch() |