import numpy as np import gradio as gr from transformers import AutoModelForCausalLM, AutoTokenizer, StoppingCriteria, StoppingCriteriaList, TextIteratorStreamer from threading import Thread import torch demo = gr.Blocks() #Hello World def greet(name, intensity): return "Hello " * intensity + name + "!" #model chatbot def predict(input, history=[]): # tokenize the new input sentence new_user_input_ids = tokenizer.encode( input + tokenizer.eos_token, return_tensors="pt" ) # append the new user input tokens to the chat history bot_input_ids = torch.cat([torch.LongTensor(history), new_user_input_ids], dim=-1) # generate a response history = model.generate( bot_input_ids, max_length=4000, pad_token_id=tokenizer.eos_token_id ).tolist() # convert the tokens to text, and then split the responses into lines response = tokenizer.decode(history[0]).split("<|endoftext|>") # print('decoded_response-->>'+str(response)) response = [ (response[i], response[i + 1]) for i in range(0, len(response) - 1, 2) ] # convert to tuples of list # print('response-->>'+str(response)) return response, history ## for Code predicr tab class StopOnTokens(StoppingCriteria): def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool: stop_ids = [29, 0] for stop_id in stop_ids: if input_ids[0][-1] == stop_id: return True return False def predict_code(message, history): history_transformer_format = history + [[message, ""]] stop = StopOnTokens() messages = "".join(["".join(["\n:"+item[0], "\n:"+item[1]]) #curr_system_message + for item in history_transformer_format]) model_inputs = tokenizer([messages], return_tensors="pt").to("cuda") streamer = TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True) generate_kwargs = dict( model_inputs, streamer=streamer, max_new_tokens=1024, do_sample=True, top_p=0.95, top_k=1000, temperature=1.0, num_beams=1, stopping_criteria=StoppingCriteriaList([stop]) ) t = Thread(target=model.generate, kwargs=generate_kwargs) t.start() partial_message = "" for new_token in streamer: if new_token != '<': partial_message += new_token yield partial_message def category_tab(): with gr.Tab(label="Hello World👋", elem_id="news-tab"): gr.Markdown("## Hello **All** and Welcome to Center Of Excellence x HuggingFace Town Hall ", elem_id="margin-top") gr.Interface( fn=greet, inputs=["text", "slider"], outputs=["text"], ) with gr.Tab(label="Spaces & Docs"): with gr.Row(elem_id="spaces-flex"): gr.Markdown("###### Simple Spaces/Sandbox Space & Doc") gr.Button( value="Basic Chatbot Gradio Doc", link = "https://www.gradio.app/guides/creating-a-chatbot-fast") gr.Button( value="Random Response", link = "https://huggingface.co/spaces/gradio/chatinterface_random_response") with gr.Tab(label="Basic Chat Bot🤖"): tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large") model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large") title = "🤖AI ChatBot" description = "Chatbots runing on HuggingFace Models." examples = [["How are you?"]] gr.Interface( fn=predict, title=title, description=description, examples=examples, inputs=["text", "state"], outputs=["chatbot", "state"], theme="finlaymacklon/boxy_violet", ) with gr.Tab(label="Spaces"): with gr.Row(elem_id="spaces-flex"): gr.Markdown("##### More Chat bot examples") gr.Button( value="Ai-ChatBot: With Colour theme", link = "https://hf.space/kingabzpro/AI-ChatBot") gr.Button( value="Simple Multimodal ", link = "https://huggingface.co/spaces/gradio/chatbot_multimodal") gr.Button( value="Chat with GPT4", link = "https://huggingface.co/spaces/ysharma/ChatGPT4") gr.Button( value="QLORA AI Chat bot: finetuned on Microsoft Phi 2 ", link = "https://huggingface.co/spaces/Gosula/ai_chatbot_phi2model_qlora") with gr.Tab(label = "💻Code Question Answering🔦"): gr.Markdown(" ## Question Answering Chatbot: CodeLLama", elem_id="margin-top") tokenizer = AutoTokenizer.from_pretrained("togethercomputer/RedPajama-INCITE-Chat-3B-v1") model = AutoModelForCausalLM.from_pretrained("togethercomputer/RedPajama-INCITE-Chat-3B-v1", torch_dtype=torch.float16) device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") model = model.to(device) gr.ChatInterface(predict_code) with gr.Tab(label="Spaces"): with gr.Row(elem_id="spaces-flex"): gr.Markdown("##### More Code LLama spaces examples") gr.Button( value="✨Detailed Code Gen Space✨", link = "https://huggingface.co/spaces/codeparrot/code-generation-models") gr.Button( value="HTML Code Generation from Images", link = "https://huggingface.co/spaces/taneemishere/html-code-generation-from-images-with-deep-neural-networks") gr.Button( value="Code LLama Playground", link = "https://huggingface.co/spaces/codellama/codellama-playground") with gr.Tab(label="Google Colab"): gr.Markdown("##### Google Colab links") gr.Button( value="Simple RAG: Github Issues ", link = "https://github.com/huggingface/cookbook/blob/main/notebooks/en/rag_zephyr_langchain.ipynb") gr.Button( value="Finetune Code LLM", link = "https://github.com/huggingface/cookbook/blob/main/notebooks/en/fine_tuning_code_llm_on_single_gpu.ipynb") gr.Button( value="RAG with MongoDB ", link = "https://github.com/huggingface/cookbook/blob/main/notebooks/en/rag_with_hugging_face_gemma_mongodb.ipynb") with gr.Blocks(css="#margin-top {margin-top: 15px} #center {text-align: center;} #news-tab {padding: 15px;} #news-tab h3 {margin: 0px; text-align: center;} #news-tab p {margin: 0px;} #article-button {flex-grow: initial;} #news-row {align-items: center;} #spaces-flex {flex-wrap: wrap; justify-content: space-around;} #space-card { display: flex; min-width: calc(90% / 3); max-width:calc(100% / 3); box-sizing: border-box;} #event-tabs {margin-top: 0px;} #spaces-flex > #paper-tile {min-width: 30%; max-width: 30%;}") as demo: with gr.Row(elem_id="center"): gr.Markdown("# Center of Excellence: Town Hall March 2024") gr.Markdown(""" At HMCoE in collaboration with HuggingFace are democratising good and cutting-edge machine learning. This page is a dedicated playground for the tools discussed at the March'24 Town Hall, in order to encourage and support the development and use of AI. We wish to foster ongoing of ethics and values; and your feedback is invaluable. Please open up an issue in the [Community tab](hhttps://huggingface.co/spaces/AITownHall2024/AI_n_HF_Intro/discussions) to share your thoughts! """) with gr.Accordion(label="Events", open=False): with gr.Tab(label="Upcoming Events"): with gr.Row(elem_id="margin-top"): with gr.Column(scale=1): gr.Image(value = "Town_Hall _banner(2).png", show_label=False) #gr.Image(value="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/making-intelligence-banner.png", show_label=False) with gr.Column(scale=2): with gr.Tabs(elem_id="event-tabs"): with gr.Tab("About the Event"): gr.Markdown(""" For HMCOE (His Majesty Center of Excellence) Town Hall, we're welcoming [Ezinwanne (Ezi) Ozoani](https://www.linkedin.com/in/ezi-ozoani/), [Solirius](https://www.solirius.com/), [PA Consulting](https://www.paconsulting.com/), [Cognizant](https://www.cognizant.com/us/en/about-cognizant) and [Docker](https://www.docker.com)! Slides to the event: [Click me!](https://www.canva.com/design/DAF-p1algoU/xLuzGPjJRrMq0uwlmj5UMA/edit?utm_content=DAF-p1algoU&utm_campaign=designshare&utm_medium=link2&utm_source=sharebutton) 🚀 **Date:** March 28th 2024, 1:00 PM GMT, **Location:** Online """) with gr.Tab("Speaker Bios"): gr.Markdown(""" ### About Ezinwanne (Ezi) Ozoani, (she/her) I’m a consultant, researcher, and open-source healthscience HuggingFace contributor, focused on applied AI & ethics. As a consultant with InEthos , I help organisations mitigate harm through AI risk management and auditing. I build cross-disciplinary AI/DL research projects that centre the ethics and values embedded in AI systems. I also love participatory problem-solving and community-driven projects. Prior to this, I was a Ai & Ethics Research Engineer at HuggingFace, building AI tools, artifacts that centre ethical principals. I made and continue to make AI development, governance more cross-disciplinary, reflective and empowering for impacted communities. I have a MSc in Computer Science with a research focus on Quantum & Quantum Enhanced ML from Trinity College Dublin. I'm a Surf and Art Enthusiast from Dublin, Ireland! #### Links - Consulting Outreach: [inEthos(Innovation n Ethics)](https://inethos.net) - Linkedin: [linkedin.com/in/eziozoani](https://www.linkedin.com/in/ezi-ozoani/) - Twitter: [@ezi_ozoani](https://twitter.com/ezi_ozoani) ### About Solirius - Evolution of Applied Al At Solirius we help our clients deliver transformational growth through innovative technology solutions and people-centred services. #### Links - Website: [Solirius.com](https://www.solirius.com/) - Linkedin: [https://www.linkedin.com/company/solirius/](https://www.linkedin.com/company/solirius/) - Twitter: [@solirius](https://twitter.com/solirius) ### PA Consulting - Ethics and Al We accelerate new growth ideas from concept, through design and development to commercial success. And we revitalise organisations with the leadership, culture, systems and processes to make innovation a reality. We deliver great work because of our brilliant people who live our purpose every day #### Links - Website: [PA Consulting](https://www.paconsulting.com/) - Linkedin: [https://www.linkedin.com/company/pa-consulting](https://www.linkedin.com/company/pa-consulting/) - Twitter: [@PA_Consulting](https://twitter.com/PA_Consulting) ### Cognizant - Code Companion Tools At Cognizant, we engineer modern businesses to improve everyday life. Because we’re dedicated to making a lasting impact. So, we do our part to promote inclusion, prepare people for the future of work, elevate underserved communities and transition to a circular and low-carbon future. **That’s what we call Engineering Impact for Good.** #### Links - Website: [Cognizant](https://www.cognizant.com/us/en/about-cognizant) - Linkedin: [https://www.linkedin.com/company/cognizant](https://www.linkedin.com/company/cognizant/) - Twitter: [@Cognizant](https://twitter.com/cognizant) ### Docker - GenAl Stack Docker helps developers bring their ideas to life by conquering the complexity of app development. Actively used by millions of developers around the world, Docker Desktop and Docker Hub provide unmatched simplicity, agility and choice. #### Links - Website: [Docker](https://www.docker.com) - Linkedin: [https://www.linkedin.com/company/docker/](https://www.linkedin.com/company/docker/) - Twitter: [@Docker](https://twitter.com/docker) """) with gr.Tab(label="Past Events"): with gr.Row(elem_id="margin-top"): gr.Markdown("We'll be announcing more events soon!") with gr.Column(): category_tab() #[category_tab(x) for x in categories] demo.launch()