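# Flower AI Chat: a small Gradio Blocks app that routes chat and code requests
# to Hugging Face hosted models via the huggingface_hub InferenceClient.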
import os
import time

import gradio as gr
from huggingface_hub import InferenceClient

HF_TOKEN = os.getenv("HF_API_KEY")  # Read the API token from the environment
client = InferenceClient(token=HF_TOKEN)

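# Display names shown in the UI mapped to their Hugging Face model IDs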
MODELS = {
    "Flower Chat": "openai/gpt-oss-20b",
    "Flower Coder": "Qwen/Qwen3-Coder-30B-A3B-Instruct",
    "Flower MULTI": "moonshotai/Kimi-K2-Instruct",
}

def generate_response(messages, model_name):
    # messages: list of dicts with "role" and "content" keys
    # model_name: display-name key into the MODELS dict
    
    response = client.chat_completion(
        model=MODELS[model_name],
        messages=messages,
    )
    return response.choices[0].message.content


def process_chat(user_message, chat_history, model_name, think_mode):
    # chat_history: list of (user_msg, bot_msg) tuples
    # think_mode: bool - whether to show the "thinking" animation
    
    if chat_history is None:
        chat_history = []

    # Convert chat_history tuples to messages format
    messages = []
    for user_m, bot_m in chat_history:
        messages.append({"role": "user", "content": user_m})
        messages.append({"role": "assistant", "content": bot_m})

    # Append current user message
    messages.append({"role": "user", "content": user_message})

    if think_mode:
        # Simulate a "thinking" pause before answering (placeholder delay)
        time.sleep(1.5)

    try:
        bot_reply = generate_response(messages, model_name)
    except Exception as e:
        bot_reply = f"❌ Hata: {str(e)}"

    chat_history.append((user_message, bot_reply))
    return chat_history, chat_history


# In code mode, only the Flower Coder model is meant to be active
def run_code_mode(user_code, chat_history, model_name):
    if chat_history is None:
        chat_history = []

    messages = []
    for user_m, bot_m in chat_history:
        messages.append({"role": "user", "content": user_m})
        messages.append({"role": "assistant", "content": bot_m})
    
    messages.append({"role": "user", "content": user_code})

    try:
        response = client.chat_completion(
            model=MODELS[model_name],
            messages=messages,
        )
        bot_reply = response.choices[0].message.content
    except Exception as e:
        bot_reply = f"❌ Hata: {str(e)}"
    
    chat_history.append((user_code, bot_reply))
    return bot_reply, chat_history


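# Build the UI; the custom CSS provides the pastel pink theme, styled chat bubbles, and centered header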
with gr.Blocks(css="""
  body { background: linear-gradient(135deg, #F5F5F5 0%, #FFF0F8 100%); font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, Cantarell, "Open Sans", "Helvetica Neue", sans-serif;}
  .chatbox .message.user { background-color: #ffceda !important; color: #000;}
  .chatbox .message.bot { background-color: #fff !important; color: #000;}
  .header { font-size: 2.5rem; font-weight: 700; color: #FFB6C1; text-align: center; margin-bottom: 10px;}
  .footer { margin-top: 10px;}
  .think-indicator { font-style: italic; color: gray; margin-left: 10px;}
  .model-dropdown { width: 180px;}
""") as demo:

    gr.Markdown("<div class='header'>🌸 Flower AI Chat</div>")

    with gr.Row():
        model_dropdown = gr.Dropdown(list(MODELS.keys()), value="Flower Chat", label="Model Seçimi", elem_classes="model-dropdown")
        think_toggle = gr.Checkbox(label="Think Modu (Düşünme Animasyonu)", value=False)

    chatbot = gr.Chatbot(elem_classes="chatbox", label="Sohbet")

    with gr.Row():
        user_input = gr.Textbox(placeholder="Ne bilmek istiyorsun?", label="Mesajını yaz", lines=1, max_lines=4)
        send_btn = gr.Button("Gönder")

    # Separate code editor and run button for the Flower Coder mode
    with gr.Row(visible=False) as code_panel:
        code_editor = gr.Textbox(label="Kod Yaz (Flower Coder modu)", lines=10, max_lines=30, interactive=True, placeholder="// Kodunu buraya yaz...")
        run_code_btn = gr.Button("Çalıştır")
        code_output = gr.Textbox(label="Çıktı", lines=10, interactive=False)

    state = gr.State([])  # Chat history

    def update_ui(model_name):
        # Show the code panel in Flower Coder mode and hide the plain chat input;
        # visibility and label changes must be returned as gr.update() objects
        if model_name == "Flower Coder":
            return (
                gr.update(visible=True),
                gr.update(visible=False),
                gr.update(visible=False),
                gr.update(label="Kod ile Sohbet"),
            )
        return (
            gr.update(visible=False),
            gr.update(visible=True),
            gr.update(visible=True),
            gr.update(label="Sohbet"),
        )

    model_dropdown.change(update_ui, inputs=model_dropdown, outputs=[code_panel, user_input, send_btn, chatbot])

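    # Both the send button and pressing Enter route the message through process_chat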
    send_btn.click(process_chat, inputs=[user_input, state, model_dropdown, think_toggle], outputs=[chatbot, state])
    user_input.submit(process_chat, inputs=[user_input, state, model_dropdown, think_toggle], outputs=[chatbot, state])

    run_code_btn.click(run_code_mode, inputs=[code_editor, state, model_dropdown], outputs=[code_output, state])

demo.launch()