import os
import textwrap

import gradio as gr

try:
    import google.generativeai as genai
except ImportError:
    print("WARNING: google.generativeai not found. Install with "
          "`pip install google-generativeai` for AI-powered responses.")
    genai = None

# IPython is only needed to render Markdown while developing in a notebook;
# the Gradio app itself does not depend on it.
try:
    from IPython.display import Markdown, display
except ImportError:
    Markdown = display = None

# Configure the Gemini model once at import time so that both the Gradio
# callback and the CLI loop under __main__ can use it. The API key is read
# from the environment instead of being hardcoded in the source.
model = None
if genai and os.getenv("GOOGLE_API_KEY"):
    genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
    model = genai.GenerativeModel('gemini-pro')

def to_markdown(text):
    """Formats model output as a blockquoted Markdown object for notebooks.

    Args:
        text (str): The text to convert.

    Returns:
        IPython.display.Markdown: The text with bullets converted to Markdown
        list markers and every line prefixed with '> ' so it renders as a
        blockquote.
    """
    text = text.replace('•', ' *')
    return Markdown(textwrap.indent(text, '> ', predicate=lambda _: True))

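# Example usage (notebook-only sketch; assumes `model` was configured above
# and IPython is available):
#
#     response = model.generate_content("What is the meaning of life?")
#     to_markdown(response.text)  # renders the reply as a '>' blockquote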

def chat(user_message, chat_history=None):
    """Generates a response to the latest user message.

    Args:
        user_message (str): The user's message.
        chat_history (list, optional): Prior messages and responses. Currently
            unused, but kept so callers that track a conversation can pass it.

    Returns:
        str: The AI's response to the user message.
    """
    if not model:
        return ("AI responses are currently unavailable. Install "
                "`google-generativeai` and set GOOGLE_API_KEY to enable them.")

    try:
        response = model.generate_content(user_message, stream=True)
        # Join every streamed chunk; returning inside the loop would discard
        # everything after the first chunk.
        return "".join(chunk.text for chunk in response)
    except Exception as e:
        print(f"Error during generation: {e}")
        return "An error occurred while generating the response. Please try again later."

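# Quick sanity check from a Python shell (a sketch; assumes GOOGLE_API_KEY is
# set and google-generativeai is installed):
#
#     >>> print(chat("Hello, who are you?"))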

interface = gr.Interface(
    fn=chat,
    inputs="textbox",  # "chat" is not a valid component shortcut; use a textbox
    outputs="textbox",
    title="Gradio Chat App",
    description="Chat with an AI assistant (requires `google-generativeai`)",
    # gr.Interface has no `catch_exceptions` argument; chat() already catches
    # its own exceptions and returns an informative message instead.
)
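# Design note: this keeps the original gr.Interface layout with plain
# textboxes. Newer Gradio releases also provide gr.ChatInterface, which
# manages the message history itself and accepts the same
# chat(message, history) signature, if a chat-style UI is preferred.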

if __name__ == "__main__":
    # The plain terminal loop below is for development/testing only; the
    # Gradio app is the default. Set CLI_CHAT=1 to use the terminal loop.
    if os.getenv("CLI_CHAT"):
        chat_history = []
        while True:
            user_message = input("You: ")
            chat_history.append(user_message)
            response = chat(user_message, chat_history)
            print(f"AI: {response}")
            chat_history.append(response)
            print("-" * 80)
    else:
        interface.launch()