import gradio as gr
import requests
import json
import os

API_KEY = os.getenv('API_KEY')
INVOKE_URL = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/functions/381be320-4721-4664-bd75-58f8783b43c7"
FETCH_URL_FORMAT = "https://api.nvcf.nvidia.com/v2/nvcf/pexec/status/"

headers = {
    "Authorization": f"Bearer {API_KEY}",
    "Accept": "application/json",
    "Content-Type": "application/json",
}

BASE_SYSTEM_MESSAGE = "I carefully provide accurate, factual, thoughtful, nuanced answers and am brilliant at reasoning."


def clear_chat(chat_history_state, chat_message):
    # Reset the stored history and clear the message box.
    print("Clearing chat...")
    chat_history_state = []
    chat_message = ''
    return chat_history_state, chat_message


def user(message, history, system_message=None):
    # Append the user message (and, optionally, a system message) to the history.
    print(f"User message: {message}")
    history = history or []
    if system_message:  # Check if a system message is provided and should be added
        history.append({"role": "system", "content": system_message})
    history.append({"role": "user", "content": message})
    return history


def call_nvidia_api(history, max_tokens, temperature, top_p):
    # Send the conversation to the NVIDIA NVCF endpoint and append the assistant reply.
    payload = {
        "messages": history,
        "temperature": temperature,
        "top_p": top_p,
        "max_tokens": max_tokens,
        "stream": False
    }
    print(f"Payload sent: {payload}")  # Log the outgoing payload
    session = requests.Session()
    response = session.post(INVOKE_URL, headers=headers, json=payload)
    # While the request is still pending (HTTP 202), poll the status endpoint.
    while response.status_code == 202:
        request_id = response.headers.get("NVCF-REQID")
        fetch_url = FETCH_URL_FORMAT + request_id
        response = session.get(fetch_url, headers=headers)
    response.raise_for_status()
    response_body = response.json()
    print(f"Payload received: {response_body}")  # Log the incoming payload
    if response_body["choices"]:
        assistant_message = response_body["choices"][0]["message"]["content"]
        history.append({"role": "assistant", "content": assistant_message})
    return history


def chat(history, system_message, max_tokens, temperature, top_p):
    # Run one turn of the conversation through the NVIDIA API.
    print("Starting chat...")
    updated_history = call_nvidia_api(history, max_tokens, temperature, top_p)
    return updated_history, ""


# Gradio interface setup
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            gr.Markdown("Mamba Chat Free Demo")
            description = """
Explore the Capabilities of Mamba Chat

Mamba-Chat is a state-of-the-art AI model designed for efficient sequence modeling. It can be used for text generation and chat applications.

How to Use:

  1. Enter your message in the textbox to start a conversation or ask a question.
  2. Adjust the Temperature and Top P sliders to control the creativity and diversity of the responses.
  3. Set the Max Tokens slider to cap the maximum length of the response (these settings map onto the API request sketched after this list).
  4. Use the System Message textbox if you wish to provide a specific context or instruction for the AI.
  5. Click Send message to submit your query and receive a response from Mamba-Chat.
  6. Press New topic to clear the chat history and start a new conversation thread.
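
For reference, each turn you send is packaged into a chat-style request to NVIDIA's API using the controls above. A rough sketch of that request (using the default slider values; the messages are placeholders):

```python
payload = {
    "messages": [
        {"role": "system", "content": "You are a helpful assistant."},  # System Message textbox
        {"role": "user", "content": "Hello!"},                          # your message
    ],
    "temperature": 0.7,   # Temperature slider
    "top_p": 0.95,        # Top P slider
    "max_tokens": 1024,   # Max Tokens slider
    "stream": False,
}
```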

Powered by NVIDIA's AI API, Mamba Chat offers a simple way to interact with a highly capable conversational model, free of charge and open to everyone.

HF Space created by: @artificialguybr (Twitter)

Discover more: artificialguy.com

""" gr.Markdown(description) chatbot = gr.Chatbot() message = gr.Textbox(label="What do you want to chat about?", placeholder="Ask me anything.", lines=3) submit = gr.Button(value="Send message") clear = gr.Button(value="New topic") system_msg = gr.Textbox(BASE_SYSTEM_MESSAGE, label="System Message", placeholder="System prompt.", lines=5) max_tokens = gr.Slider(20, 1024, label="Max Tokens", step=20, value=1024, interactive=True) temperature = gr.Slider(0.0, 1.0, label="Temperature", step=0.1, value=0.7, interactive=True) top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.95, interactive=True) chat_history_state = gr.State([]) # Ajuste na definição da função update_chatbot para aceitar o valor atualizado do system_msg def update_chatbot(message, chat_history, system_message, max_tokens, temperature, top_p): print("Updating chatbot...") if not chat_history or (chat_history and chat_history[-1]["role"] != "user"): chat_history = user(message, chat_history, system_message if not chat_history else None) else: chat_history = user(message, chat_history) chat_history, _ = chat(chat_history, system_message, max_tokens, temperature, top_p) formatted_chat_history = [] for user_msg, assistant_msg in zip([msg["content"].strip() for msg in chat_history if msg["role"] == "user"], [msg["content"].strip() for msg in chat_history if msg["role"] == "assistant"]): if user_msg or assistant_msg: # Verify if either message is not empty formatted_chat_history.append([user_msg, assistant_msg]) return formatted_chat_history, chat_history, "" submit.click( fn=update_chatbot, inputs=[message, chat_history_state, system_msg, max_tokens, temperature, top_p], outputs=[chatbot, chat_history_state, message] ) clear.click( fn=clear_chat, inputs=[chat_history_state, message], outputs=[chat_history_state, message] ) demo.launch()