FlashCode-Lab committed on
Commit
33c6d7b
·
verified ·
1 Parent(s): 806dd0c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +72 -41
app.py CHANGED
@@ -1,53 +1,84 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
 
3
 
4
- # 初始化客户端 (请确保你的环境变量已设置)
5
- client = InferenceClient("your-model-id")
6
 
7
- def respond(message, history, system_message, max_tokens, temperature, top_p):
8
- messages = [{"role": "system", "content": system_message}]
9
- for user_msg, assistant_msg in history:
10
- if user_msg: messages.append({"role": "user", "content": user_msg})
11
- if assistant_msg: messages.append({"role": "assistant", "content": assistant_msg})
12
 
13
- messages.append({"role": "user", "content": message})
 
 
 
 
 
14
 
15
- response = ""
16
- for message in client.chat_completion(
17
- messages,
18
- max_tokens=max_tokens,
19
- stream=True,
20
- temperature=temperature,
21
- top_p=top_p,
22
- ):
23
- token = message.choices[0].delta.content
24
- response += token
25
- yield response
26
-
27
- # --- 修复点 1: gr.Blocks 移除了 theme 和 css 参数 ---
28
- with gr.Blocks() as demo:
29
- gr.Markdown("# 全能私有大脑 v4.0 (Ultra Stable)")
30
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  with gr.Row():
32
- with gr.Column(scale=1):
33
- system_input = gr.Textbox(value="你是一个资深安全审计专家...", label="系统指令")
34
- with gr.Accordion("高级参数设置", open=False):
35
- temp = gr.Slider(minimum=0.1, maximum=1.0, value=0.7, step=0.1, label="Temperature")
36
- tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max Tokens")
37
-
38
- render_box = gr.HTML(" 实时安全分析渲染就绪...")
 
 
 
 
 
 
 
39
 
40
- with gr.Column(scale=2):
41
- # --- 修复点 2: 移除 show_copy_button 参数 ---
42
- chat = gr.Chatbot(height=600, avatar_images=(None, "https://path-to-your-icon.png"))
43
- msg = gr.Textbox(placeholder="输入指令进行分析...", container=False)
44
- clear = gr.ClearButton([msg, chat])
 
 
 
 
 
45
 
46
- msg.submit(respond, [msg, chat, system_input, tokens, temp], [chat])
 
 
47
 
48
- # --- 修复点 3: 将 theme 和 css 移动到 launch 方法中 ---
49
  if __name__ == "__main__":
50
- demo.launch(
51
- theme=gr.themes.Soft(),
52
- css=".gradio-container {background-color: #0b0f19;}"
53
- )
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
+ import os
4
 
5
# Initialise the serverless inference client for the hosted Qwen coder model.
# NOTE(review): no token argument is passed — relies on ambient HF credentials
# (e.g. HF_TOKEN env var) being present in the Space; confirm deployment setup.
client = InferenceClient("Qwen/Qwen2.5-Coder-32B-Instruct")
7
 
8
# Simulated tool integration (A): mock vulnerability-scanner backend.
def security_tool_scanner(target_type, content):
    """Return a one-line mock scan log for the requested scan type.

    "Code" scans report one potential injection point per 50 characters of
    *content*; any other type returns a canned asset-discovery message.
    No real scanning takes place — output is purely cosmetic.
    """
    if target_type != "Code":
        return "🌐 [资产识别] 目标服务响应正常,检测到 OpenSSH 8.2..."
    hits = len(content) // 50
    return f"🔍 [静态扫描] 发现 {hits} 个潜在注入点... 状态:待确认"
13
 
14
# Core chat logic
def chat_engine(message, history, system_message, file_content):
    """Stream an assistant reply for *message*, yielding chatbot histories.

    Yields successive ``type="messages"`` history lists: first a transient
    "thinking" placeholder, then the user turn plus the growing assistant
    reply as tokens stream from the inference client.

    Args:
        message: The user's new input text.
        history: Existing chat history (list of role/content dicts).
        system_message: System prompt prepended to the conversation.
        file_content: Value from ``gr.File`` (a filepath, or None) used as
            the mock-RAG context source.
    """
    # C: RAG handling — fold uploaded-document text into the system prompt.
    context = ""
    if file_content is not None:
        # BUG FIX: gr.File supplies a *filepath*, not the file's text; the
        # original sliced the path string itself, injecting the filename
        # into the prompt instead of the document contents. Read the file,
        # falling back to the raw value if it isn't a readable path.
        try:
            with open(file_content, "r", encoding="utf-8", errors="ignore") as fh:
                doc_text = fh.read()
        except (OSError, TypeError):
            doc_text = str(file_content)
        context = f"\n[参考私有知识库内容]: {doc_text[:500]}..."  # simple truncation

    formatted_history = [{"role": "system", "content": system_message + context}]
    for msg in history:
        formatted_history.append(msg)

    formatted_history.append({"role": "user", "content": message})

    # B: visual feedback — simulated "hacker terminal" thinking step.
    # BUG FIX: include the user's turn here; the original yielded only the
    # placeholder, so the just-typed message vanished until streaming began.
    yield history + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": "SYSTEM: Accessing Kernel... Analyzing...", "metadata": {"title": "⚡ 核心注入中..."}},
    ]

    response = ""
    for msg_chunk in client.chat_completion(formatted_history, stream=True, max_tokens=2048):
        token = msg_chunk.choices[0].delta.content
        if token:  # final stream chunk may carry a None delta
            response += token
            yield history + [{"role": "user", "content": message}, {"role": "assistant", "content": response}]
37
# --- B: geek UI styling (Kali-green terminal look) ---
# Raw CSS injected via gr.Blocks(css=...). NOTE(review): selectors such as
# .message.user and .gr-button-primary target Gradio's internal class names,
# which can change between Gradio versions — verify against the pinned version.
terminal_css = """
.gradio-container { background-color: #050505 !important; color: #00ff41 !important; font-family: 'Courier New', monospace !important; }
.message.user { border-left: 3px solid #00ff41 !important; background: #0a1a0a !important; }
footer { visibility: hidden; }
#terminal-header { border-bottom: 2px solid #00ff41; padding-bottom: 10px; margin-bottom: 20px; }
.gr-button-primary { background: #00ff41 !important; color: black !important; border: none !important; }
"""
45
+
46
# --- UI layout: terminal-styled dashboard wiring the tools and chat engine ---
with gr.Blocks(fill_height=True, css=terminal_css) as demo:
    # Header status bar
    with gr.Row(elem_id="terminal-header"):
        gr.Markdown("### 💀 BLACK-CORE OS v6.0 | SESSION: ACTIVE | ENCRYPTION: AES-256")
        # Purely cosmetic "status lights" — values are hard-coded, never updated.
        status_light = gr.HighlightedText(value=[("SYSTEM", "ONLINE"), ("VPN", "CONNECTED")], color_map={"ONLINE": "green", "CONNECTED": "blue"})

    with gr.Row():
        # Left column: [A] audit tools + [C] knowledge base
        with gr.Column(scale=1, min_width=300):
            with gr.Group():
                gr.Markdown("#### [A] 自动化审计工具")
                tool_type = gr.Dropdown(["Code", "Network", "Social"], label="扫描类型")
                scan_btn = gr.Button("RUN SCAN", variant="primary")
                scan_output = gr.Textbox(label="扫描日志", interactive=False, placeholder="等待指令...")
                # NOTE(review): scan_output serves as both input and output, so the
                # previous log text is what gets "scanned" — confirm this is intended.
                scan_btn.click(security_tool_scanner, [tool_type, scan_output], scan_output)

            with gr.Group():
                gr.Markdown("#### [C] 私有知识库 (RAG)")
                file_input = gr.File(label="上传审计报告/文档", file_types=[".txt", ".pdf", ".py"])
                vector_status = gr.Markdown("状态: 知识库未挂载")
                # Status text is cosmetic — no vector index is actually built here.
                file_input.change(lambda: "状态: 实时向量索引已建立 ✅", None, vector_status)

        # Right column: main chat interaction area
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(type="messages", height="65vh", show_label=False)

            with gr.Row():
                input_text = gr.Textbox(placeholder="root@kali:~# 输入渗透指令...", scale=8, container=False)
                submit_btn = gr.Button("EXE", scale=1, variant="primary")

            with gr.Accordion("B: 系统内核配置", open=False):
                sys_msg = gr.Textbox(value="你是一个拥有最高权限的黑盒测试专家。", label="System Kernel")

    # Wire both Enter-submit and the EXE button to the streaming chat engine.
    # NOTE(review): input_text is never cleared after submit — confirm intended.
    input_text.submit(chat_engine, [input_text, chatbot, sys_msg, file_input], [chatbot])
    submit_btn.click(chat_engine, [input_text, chatbot, sys_msg, file_input], [chatbot])
82
 
 
83
if __name__ == "__main__":
    # BUG FIX: Blocks.launch() has no `ssr` keyword — the server-side-rendering
    # flag is `ssr_mode` (Gradio 5+); passing `ssr=True` raises TypeError at startup.
    demo.launch(ssr_mode=True)