broadfield-dev committed on
Commit
30f0473
·
verified ·
1 Parent(s): d9af520

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +267 -139
app.py CHANGED
@@ -30,7 +30,6 @@ try:
30
  print("model_logic.py loaded successfully.")
31
  except ImportError:
32
  print("Warning: Local modules (build_logic.py, model_logic.py) not found. Using dummy functions.")
33
- # Define dummy functions so the app can at least start
34
  def get_available_providers(): return ["DummyProvider"]
35
  def get_models_for_provider(p): return ["dummy-model"]
36
  def get_default_model_for_provider(p): return "dummy-model"
@@ -42,10 +41,11 @@ except ImportError:
42
  def list_space_files_for_browsing(*args): return ([], "Error: build_logic not found.")
43
  def get_space_repository_info(*args): return (None, [], "Error: build_logic not found.")
44
  def get_space_file_content(*args): return ("", "Error: build_logic not found.")
45
- def update_space_file(*args): return "Error: build_logic not found."
46
  def build_logic_parse_markdown(md): return {"files": []}
47
  def build_logic_delete_space_file(*args): return "Error: build_logic not found."
48
  def get_space_runtime_status(*args): return (None, "Error: build_logic not found.")
 
49
 
50
 
51
  # --- New Feature Functions (can be moved to build_logic.py) ---
@@ -133,7 +133,7 @@ You can issue multiple actions. For example, to delete a file and then add a new
133
  Use these actions when the user's request explicitly calls for them (e.g., "delete the readme file", "make this space private", "create a new private space called my-test-app"). If no code is provided, assist the user with their tasks.
134
  """
135
 
136
-
137
def escape_html_for_markdown(text):
    """Escape HTML-sensitive characters so *text* renders literally in Markdown.

    Args:
        text: The string to escape. Any non-string input yields "".

    Returns:
        The escaped string. "&" is replaced first so the "&" produced by the
        later "<"/">" escapes is not double-escaped.
    """
    if not isinstance(text, str): return ""
    # NOTE(review): the scraped source showed identity replacements
    # (.replace("&", "&") etc.) -- HTML entities decoded by the page scrape.
    # Restored to the escaping the function name promises.
    return text.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
@@ -310,32 +310,111 @@ def _generate_ui_outputs_from_cache(owner, space_name):
310
 
311
  return formatted_md_val, preview_md_val, gr.update(value=download_file, interactive=download_file is not None)
312
 
313
- def handle_chat_submit(user_message, chat_history, hf_api_key_input, provider_select, model_select, system_prompt, hf_owner_name, hf_repo_name, _current_formatted_markdown):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
314
  global parsed_code_blocks_state_cache
315
- _chat_msg_in = ""
316
- _chat_hist = list(chat_history)
317
- _status = "Initializing..."
318
- _detected_files_update, _formatted_output_update, _download_btn_update = gr.update(), gr.update(), gr.update(interactive=False, value=None)
 
 
 
 
319
 
320
  if not user_message.strip():
321
- _status = "Cannot send an empty message."
322
- yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update); return
323
-
324
- _chat_hist.append((user_message, None)); _status = f"Sending to {model_select}..."
325
- yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update)
 
326
 
 
 
 
 
 
 
 
 
327
  current_sys_prompt = system_prompt.strip() or DEFAULT_SYSTEM_PROMPT
328
- current_files_context = f"\n\n## Current Space Context: {hf_owner_name}/{hf_repo_name}\n"
329
  export_result = _export_selected_logic(None, f"{hf_owner_name}/{hf_repo_name}", parsed_code_blocks_state_cache)
330
- current_files_context += export_result["output_str"]
331
-
332
  user_message_with_context = user_message.strip() + "\n" + current_files_context
333
  api_msgs = _convert_gr_history_to_api_messages(current_sys_prompt, _chat_hist[:-1], user_message_with_context)
334
 
335
  try:
336
- _status = f"Waiting for {model_select}..."
337
- yield (_chat_msg_in, _chat_hist, _status, gr.update(), gr.update(), gr.update())
338
-
339
  full_bot_response_content = ""
340
  for chunk in generate_stream(provider_select, model_select, None, api_msgs):
341
  if chunk is None: continue
@@ -343,65 +422,137 @@ def handle_chat_submit(user_message, chat_history, hf_api_key_input, provider_se
343
  full_bot_response_content = chunk; break
344
  full_bot_response_content += str(chunk)
345
  _chat_hist[-1] = (user_message, full_bot_response_content)
346
- _status = f"Streaming from {model_select}..."
347
- yield (_chat_msg_in, _chat_hist, _status, gr.update(), gr.update(), gr.update())
 
 
 
348
 
349
  if "Error:" in full_bot_response_content:
350
  _status = full_bot_response_content
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
351
  else:
352
- _status = "Stream complete. Processing response..."
353
- action_results = []
354
- action_pattern = re.compile(r"### HF_ACTION:\s*(?P<command_line>[^\n]+)")
355
-
356
- # Process destructive actions first
357
- for match in action_pattern.finditer(full_bot_response_content):
358
- cmd_parts = shlex.split(match.group("command_line").strip())
359
- if not cmd_parts: continue
360
- command, args = cmd_parts[0].upper(), cmd_parts[1:]
361
- if command == "DELETE_FILE":
362
- if not args: action_results.append("Action Failed: DELETE_FILE needs a path."); continue
363
- filepath = args[0]
364
- status_msg = build_logic_delete_space_file(hf_api_key_input, hf_repo_name, hf_owner_name, filepath)
365
- action_results.append(f"DELETE '{filepath}': {status_msg}")
366
- if "Successfully" in status_msg:
367
- parsed_code_blocks_state_cache = [b for b in parsed_code_blocks_state_cache if b["filename"] != filepath]
368
- elif command == "DELETE_SPACE":
369
- status_msg = build_logic_delete_space(hf_api_key_input, hf_owner_name, hf_repo_name)
370
- action_results.append(f"DELETE_SPACE '{hf_owner_name}/{hf_repo_name}': {status_msg}")
371
- if "Successfully" in status_msg: parsed_code_blocks_state_cache = []
372
-
373
- # Process other actions
374
- for match in action_pattern.finditer(full_bot_response_content):
375
- cmd_parts = shlex.split(match.group("command_line").strip())
376
- if not cmd_parts: continue
377
- command, args = cmd_parts[0].upper(), cmd_parts[1:]
378
- if command == "SET_PRIVATE":
379
- if not args: action_results.append("Action Failed: SET_PRIVATE needs true/false."); continue
380
- is_private = args[0].lower() == 'true'
381
- status_msg = build_logic_set_space_privacy(hf_api_key_input, f"{hf_owner_name}/{hf_repo_name}", private=is_private)
382
- action_results.append(f"SET_PRIVATE to {is_private}: {status_msg}")
383
-
384
- parsing_res = _parse_chat_stream_logic(full_bot_response_content, existing_files_state=parsed_code_blocks_state_cache)
385
- if parsing_res["error_message"]:
386
- action_results.append(f"Parsing Error: {parsing_res['error_message']}")
387
- else:
388
- parsed_code_blocks_state_cache = parsing_res["parsed_code_blocks"]
389
- if any(f.get("filename") for f in parsing_res["parsed_code_blocks"]):
390
- action_results.append("File content updated from AI response.")
391
-
392
- _status = " | ".join(action_results) if action_results else "No actions performed. Files parsed."
393
-
394
- _formatted_output_update, _detected_files_update, _download_btn_update = _generate_ui_outputs_from_cache(hf_owner_name, hf_repo_name)
395
 
396
  except Exception as e:
397
  error_msg = f"An unexpected error occurred: {e}"
398
  print(f"Error in handle_chat_submit: {e}")
399
- if _chat_hist:
400
- _chat_hist[-1] = (user_message, error_msg)
401
- _status = error_msg
402
- _formatted_output_update, _detected_files_update, _download_btn_update = _generate_ui_outputs_from_cache(hf_owner_name, hf_repo_name)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
403
 
404
- yield (_chat_msg_in, _chat_hist, _status, _detected_files_update, _formatted_output_update, _download_btn_update)
 
 
405
 
406
  def update_models_dropdown(provider_select):
407
  if not provider_select: return gr.update(choices=[], value=None)
@@ -534,79 +685,30 @@ def handle_refresh_space_status(hf_api_key_ui, ui_owner_name, ui_space_name):
534
  for key, val in status.items():
535
  md += f"- **{key.replace('_', ' ').title()}:** `{val}`\n"
536
  return md
537
- custom_theme = gr.themes.Base(
538
- primary_hue="teal",
539
- secondary_hue="purple",
540
- neutral_hue="zinc",
541
- text_size="sm",
542
- spacing_size="md",
543
- radius_size="sm",
544
- font=["System UI", "sans-serif"]
545
- )
546
 
 
 
547
  custom_css = """
548
- body {
549
- background: linear-gradient(to bottom right, #2c3e50, #34495e);
550
- color: #ecf0f1;
551
- }
552
- .gradio-container {
553
- background: transparent !important;
554
- }
555
- .gr-box, .gr-panel, .gr-pill {
556
- background-color: rgba(44, 62, 80, 0.8) !important;
557
- border-color: rgba(189, 195, 199, 0.2) !important;
558
- }
559
- .gr-textbox, .gr-dropdown, .gr-button, .gr-code, .gr-chat-message {
560
- border-color: rgba(189, 195, 199, 0.3) !important;
561
- background-color: rgba(52, 73, 94, 0.9) !important;
562
- color: #ecf0f1 !important;
563
- }
564
- .gr-button.gr-button-primary {
565
- background-color: #1abc9c !important;
566
- color: white !important;
567
- border-color: #16a085 !important;
568
- }
569
- .gr-button.gr-button-secondary {
570
- background-color: #9b59b6 !important;
571
- color: white !important;
572
- border-color: #8e44ad !important;
573
- }
574
- .gr-button.gr-button-stop {
575
- background-color: #e74c3c !important;
576
- color: white !important;
577
- border-color: #c0392b !important;
578
- }
579
- .gr-markdown {
580
- background-color: rgba(44, 62, 80, 0.7) !important;
581
- padding: 10px;
582
- border-radius: 5px;
583
- }
584
- .gr-markdown h1, .gr-markdown h2, .gr-markdown h3, .gr-markdown h4, .gr-markdown h5, .gr-markdown h6 {
585
- color: #ecf0f1 !important;
586
- border-bottom-color: rgba(189, 195, 199, 0.3) !important;
587
- }
588
- .gr-markdown pre code {
589
- background-color: rgba(52, 73, 94, 0.95) !important;
590
- border-color: rgba(189, 195, 199, 0.3) !important;
591
- }
592
- .gr-chatbot {
593
- background-color: rgba(44, 62, 80, 0.7) !important;
594
- border-color: rgba(189, 195, 199, 0.2) !important;
595
- }
596
- .gr-chatbot .message {
597
- background-color: rgba(52, 73, 94, 0.9) !important;
598
- color: #ecf0f1 !important;
599
- border-color: rgba(189, 195, 199, 0.3) !important;
600
- }
601
- .gr-chatbot .message.user {
602
- background-color: rgba(46, 204, 113, 0.9) !important;
603
- color: black !important;
604
- }
605
  """
606
 
607
  # --- Gradio UI Definition ---
608
- #with gr.Blocks(theme=gr.themes.Soft(), css=".gradio-container {background: linear-gradient(to bottom right, #eff6ff, #dbeafe);}") as demo:
609
  with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
 
 
 
610
  gr.Markdown("# πŸ€– AI-Powered Hugging Face Space Builder")
611
  gr.Markdown("Use an AI assistant to create, modify, build, and manage your Hugging Face Spaces directly from this interface.")
612
 
@@ -631,6 +733,13 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
631
  send_chat_button = gr.Button("Send", variant="primary", scale=1)
632
  status_output = gr.Textbox(label="Last Action Status", interactive=False, value="Ready.")
633
 
 
 
 
 
 
 
 
634
  with gr.Tabs():
635
  with gr.TabItem("πŸ“ Generated Markdown & Build"):
636
  with gr.Row():
@@ -641,7 +750,7 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
641
  gr.Markdown("### Build Controls")
642
  space_sdk_select = gr.Dropdown(label="Space SDK", choices=["gradio", "streamlit", "docker", "static"], value="gradio")
643
  space_private_checkbox = gr.Checkbox(label="Make Space Private", value=False)
644
- build_space_button = gr.Button("πŸš€ Build / Update Space on HF", variant="primary")
645
  build_status_display = gr.Textbox(label="Build Operation Status", interactive=False)
646
  refresh_status_button = gr.Button("πŸ”„ Refresh Runtime Status")
647
  space_runtime_status_display = gr.Markdown("*Runtime status will appear here.*")
@@ -667,11 +776,30 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
667
  # --- Event Listeners ---
668
  provider_select.change(update_models_dropdown, inputs=provider_select, outputs=model_select)
669
 
670
- chat_inputs = [chat_message_input, chatbot_display, hf_api_key_input, provider_select, model_select, system_prompt_input, owner_name_input, space_name_input, formatted_space_output_display]
671
- chat_outputs = [chat_message_input, chatbot_display, status_output, detected_files_preview, formatted_space_output_display, download_button]
 
 
 
 
672
  send_chat_button.click(handle_chat_submit, inputs=chat_inputs, outputs=chat_outputs)
673
  chat_message_input.submit(handle_chat_submit, inputs=chat_inputs, outputs=chat_outputs)
674
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
675
  load_space_outputs = [formatted_space_output_display, detected_files_preview, status_output, file_browser_dropdown, owner_name_input, space_name_input, space_iframe_display, download_button, build_status_display, edit_status_display, space_runtime_status_display, chatbot_display]
676
  load_space_button.click(fn=handle_load_existing_space, inputs=[hf_api_key_input, owner_name_input, space_name_input], outputs=load_space_outputs)
677
 
 
30
  print("model_logic.py loaded successfully.")
31
  except ImportError:
32
  print("Warning: Local modules (build_logic.py, model_logic.py) not found. Using dummy functions.")
 
33
  def get_available_providers(): return ["DummyProvider"]
34
  def get_models_for_provider(p): return ["dummy-model"]
35
  def get_default_model_for_provider(p): return "dummy-model"
 
41
  def list_space_files_for_browsing(*args): return ([], "Error: build_logic not found.")
42
  def get_space_repository_info(*args): return (None, [], "Error: build_logic not found.")
43
  def get_space_file_content(*args): return ("", "Error: build_logic not found.")
44
+ def update_space_file(*args, **kwargs): return "Error: build_logic not found."
45
  def build_logic_parse_markdown(md): return {"files": []}
46
  def build_logic_delete_space_file(*args): return "Error: build_logic not found."
47
  def get_space_runtime_status(*args): return (None, "Error: build_logic not found.")
48
+ # --- END: Dummy functions ---
49
 
50
 
51
  # --- New Feature Functions (can be moved to build_logic.py) ---
 
133
  Use these actions when the user's request explicitly calls for them (e.g., "delete the readme file", "make this space private", "create a new private space called my-test-app"). If no code is provided, assist the user with their tasks.
134
  """
135
 
136
+ # --- Helper Functions (largely unchanged) ---
137
def escape_html_for_markdown(text):
    """Escape HTML-sensitive characters so *text* renders literally in Markdown.

    Args:
        text: The string to escape. Any non-string input yields "".

    Returns:
        The escaped string. "&" is replaced first so the "&" produced by the
        later "<"/">" escapes is not double-escaped.
    """
    if not isinstance(text, str): return ""
    # NOTE(review): the scraped source showed identity replacements
    # (.replace("&", "&") etc.) -- HTML entities decoded by the page scrape.
    # Restored to the escaping the function name promises.
    return text.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
 
310
 
311
  return formatted_md_val, preview_md_val, gr.update(value=download_file, interactive=download_file is not None)
312
 
313
# --- NEW: Core logic for Change Staging and Confirmation ---

def generate_and_stage_changes(ai_response_content, current_files_state, hf_owner_name, hf_repo_name):
    """Parse an AI response into a reviewable changeset instead of applying it.

    Compares the files proposed in *ai_response_content* against
    *current_files_state* and collects any ``### HF_ACTION:`` commands.

    Args:
        ai_response_content: Full text of the AI reply.
        current_files_state: List of parsed file-block dicts describing the
            current space (keys used here: "filename", "code",
            "is_structure_block").
        hf_owner_name: Owner of the target space.
        hf_repo_name: Name of the target space.

    Returns:
        Tuple ``(changeset, markdown_summary, parsing_result)`` where
        *changeset* is a list of action dicts, *markdown_summary* is a
        human-readable plan, and *parsing_result* is the raw output of
        ``_parse_chat_stream_logic``.
    """
    changeset = []
    # Structure blocks describe layout, not real files -- exclude them from diffing.
    current_files_dict = {f["filename"]: f for f in current_files_state if not f.get("is_structure_block")}

    # 1. Parse proposed files from the AI response.
    parsing_result = _parse_chat_stream_logic(ai_response_content, existing_files_state=current_files_state)
    proposed_files = parsing_result.get("parsed_code_blocks", [])

    # 2. Collect HF_ACTION commands (one per "### HF_ACTION: <cmd> <args>" line).
    action_pattern = re.compile(r"### HF_ACTION:\s*(?P<command_line>[^\n]+)")
    for match in action_pattern.finditer(ai_response_content):
        cmd_parts = shlex.split(match.group("command_line").strip())
        if not cmd_parts: continue
        command, args = cmd_parts[0].upper(), cmd_parts[1:]

        # Add actions to the changeset.
        if command == "DELETE_FILE" and args:
            changeset.append({"type": "DELETE_FILE", "path": args[0]})
        elif command == "SET_PRIVATE" and args:
            changeset.append({"type": "SET_PRIVACY", "private": args[0].lower() == 'true', "repo_id": f"{hf_owner_name}/{hf_repo_name}"})
        elif command == "DELETE_SPACE":
            changeset.append({"type": "DELETE_SPACE", "owner": hf_owner_name, "space_name": hf_repo_name})
        elif command == "CREATE_SPACE" and args:
            repo_id = args[0]
            sdk = "gradio"  # default
            private = False  # default
            # FIX: guard the index+1 lookups so a trailing "--sdk"/"--private"
            # flag with no value cannot raise IndexError.
            if '--sdk' in args:
                sdk_idx = args.index('--sdk')
                if sdk_idx + 1 < len(args):
                    sdk = args[sdk_idx + 1]
            if '--private' in args:
                private_idx = args.index('--private')
                if private_idx + 1 < len(args):
                    private = args[private_idx + 1].lower() == 'true'
            changeset.append({"type": "CREATE_SPACE", "repo_id": repo_id, "sdk": sdk, "private": private})

    # 3. Diff proposed files against current files to classify CREATE vs UPDATE.
    for file_block in proposed_files:
        if file_block.get("is_structure_block"): continue

        filename = file_block["filename"]
        if filename not in current_files_dict:
            changeset.append({"type": "CREATE_FILE", "path": filename, "content": file_block["code"], "lang": file_block["language"]})
        elif file_block["code"] != current_files_dict[filename]["code"]:
            changeset.append({"type": "UPDATE_FILE", "path": filename, "content": file_block["code"], "lang": file_block["language"]})

    # 4. Render the changeset as a human-readable Markdown plan.
    if not changeset:
        return [], "The AI did not propose any specific changes to files or the space.", parsing_result

    # NOTE(review): the emoji below were mojibake in the scraped source
    # (e.g. "πŸ“‹"); restored to the intended characters.
    md_summary = ["### 📋 Proposed Changes Plan\n"]
    md_summary.append("The AI has proposed the following changes. Please review and confirm.")

    for change in changeset:
        if change["type"] == "CREATE_FILE":
            md_summary.append(f"- **➕ Create File:** `{change['path']}`")
        elif change["type"] == "UPDATE_FILE":
            md_summary.append(f"- **🔄 Update File:** `{change['path']}`")
        elif change["type"] == "DELETE_FILE":
            md_summary.append(f"- **➖ Delete File:** `{change['path']}`")
        elif change["type"] == "CREATE_SPACE":
            md_summary.append(f"- **🚀 Create New Space:** `{change['repo_id']}` (SDK: {change['sdk']}, Private: {change['private']})")
        elif change["type"] == "SET_PRIVACY":
            md_summary.append(f"- **🔒 Set Privacy:** Set `{change['repo_id']}` to `private={change['private']}`")
        elif change["type"] == "DELETE_SPACE":
            md_summary.append(f"- **💥 DELETE ENTIRE SPACE:** `{change['owner']}/{change['space_name']}` **(DESTRUCTIVE ACTION)**")

    return changeset, "\n".join(md_summary), parsing_result
381
+
382
+ # --- Gradio Event Handlers ---
383
+
384
+ def handle_chat_submit(user_message, chat_history, hf_api_key_input, provider_select, model_select, system_prompt, hf_owner_name, hf_repo_name):
385
  global parsed_code_blocks_state_cache
386
+ _chat_msg_in, _chat_hist = "", list(chat_history)
387
+
388
+ # UI updates for streaming
389
+ yield (
390
+ _chat_msg_in, _chat_hist, "Initializing...",
391
+ gr.update(), gr.update(), gr.update(interactive=False),
392
+ [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
393
+ )
394
 
395
  if not user_message.strip():
396
+ yield (
397
+ _chat_msg_in, _chat_hist, "Cannot send an empty message.",
398
+ gr.update(), gr.update(), gr.update(),
399
+ [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
400
+ )
401
+ return
402
 
403
+ _chat_hist.append((user_message, None))
404
+ yield (
405
+ _chat_msg_in, _chat_hist, f"Sending to {model_select}...",
406
+ gr.update(), gr.update(), gr.update(),
407
+ [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
408
+ )
409
+
410
+ # Prepare context for the AI
411
  current_sys_prompt = system_prompt.strip() or DEFAULT_SYSTEM_PROMPT
 
412
  export_result = _export_selected_logic(None, f"{hf_owner_name}/{hf_repo_name}", parsed_code_blocks_state_cache)
413
+ current_files_context = f"\n\n## Current Space Context: {hf_owner_name}/{hf_repo_name}\n{export_result['output_str']}"
 
414
  user_message_with_context = user_message.strip() + "\n" + current_files_context
415
  api_msgs = _convert_gr_history_to_api_messages(current_sys_prompt, _chat_hist[:-1], user_message_with_context)
416
 
417
  try:
 
 
 
418
  full_bot_response_content = ""
419
  for chunk in generate_stream(provider_select, model_select, None, api_msgs):
420
  if chunk is None: continue
 
422
  full_bot_response_content = chunk; break
423
  full_bot_response_content += str(chunk)
424
  _chat_hist[-1] = (user_message, full_bot_response_content)
425
+ yield (
426
+ _chat_msg_in, _chat_hist, f"Streaming from {model_select}...",
427
+ gr.update(), gr.update(), gr.update(),
428
+ [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
429
+ )
430
 
431
  if "Error:" in full_bot_response_content:
432
  _status = full_bot_response_content
433
+ yield (_chat_msg_in, _chat_hist, _status, gr.update(), gr.update(), gr.update(), [], gr.update(), gr.update(), gr.update(), gr.update())
434
+ return
435
+
436
+ # NEW: Instead of applying, generate and stage changes
437
+ _status = "Stream complete. Generating change plan..."
438
+ yield (_chat_msg_in, _chat_hist, _status, gr.update(), gr.update(), gr.update(), [], gr.update(), gr.update(), gr.update(), gr.update())
439
+
440
+ staged_changeset, summary_md, parsing_res = generate_and_stage_changes(full_bot_response_content, parsed_code_blocks_state_cache, hf_owner_name, hf_repo_name)
441
+
442
+ if parsing_res["error_message"]:
443
+ _status = f"Parsing Error: {parsing_res['error_message']}"
444
+ yield (_chat_msg_in, _chat_hist, _status, gr.update(), gr.update(), gr.update(), [], gr.update(), gr.update(), gr.update(), gr.update())
445
+ return
446
+
447
+ if not staged_changeset:
448
+ _status = summary_md # "No changes proposed" message
449
+ # Still update the cache with the AI's *view* of the world, even if no changes.
450
+ parsed_code_blocks_state_cache = parsing_res["parsed_code_blocks"]
451
+ _formatted, _detected, _download = _generate_ui_outputs_from_cache(hf_owner_name, hf_repo_name)
452
+ yield (_chat_msg_in, _chat_hist, _status, _detected, _formatted, _download, [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False))
453
  else:
454
+ _status = "Change plan generated. Please review and confirm below."
455
+ yield (
456
+ _chat_msg_in, _chat_hist, _status,
457
+ gr.update(), gr.update(), gr.update(),
458
+ staged_changeset, # Send changeset to state
459
+ gr.update(value=summary_md), # Display summary
460
+ gr.update(visible=True), # Show the accordion
461
+ gr.update(visible=True), # Show confirm button
462
+ gr.update(visible=True) # Show cancel button
463
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
464
 
465
  except Exception as e:
466
  error_msg = f"An unexpected error occurred: {e}"
467
  print(f"Error in handle_chat_submit: {e}")
468
+ if _chat_hist: _chat_hist[-1] = (user_message, error_msg)
469
+ yield (
470
+ _chat_msg_in, _chat_hist, error_msg,
471
+ gr.update(), gr.update(), gr.update(),
472
+ [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
473
+ )
474
+
475
def handle_confirm_changes(hf_api_key, owner_name, space_name, changeset):
    """Apply a previously staged changeset to the Hugging Face Space.

    Runs CREATE_SPACE first (other operations may target the new repo), then
    applies the remaining operations one by one, mutating the module-level
    ``parsed_code_blocks_state_cache`` to mirror each successful change.

    Args:
        hf_api_key: Token from the UI textbox, passed through to build_logic calls.
        owner_name: Space owner from the UI.
        space_name: Space name from the UI.
        changeset: List of action dicts produced by generate_and_stage_changes.

    Returns:
        8-tuple for Gradio outputs: (status text, formatted markdown, detected
        files preview, download button update, accordion/confirm/cancel
        visibility updates, cleared changeset state).
    """
    global parsed_code_blocks_state_cache
    if not changeset:
        # NOTE(review): every other exit path returns [] as the 8th value to
        # clear the changeset State; this one returns gr.update(visible=False)
        # instead -- confirm this mismatch is intended.
        return "No changes to apply.", gr.update(), gr.update(), gr.update(), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)

    status_messages = []

    # Handle space creation first, as other ops might depend on it
    create_space_op = next((c for c in changeset if c['type'] == 'CREATE_SPACE'), None)
    if create_space_op:
        repo_parts = create_space_op['repo_id'].split('/')
        # Silently skipped when repo_id is not "owner/repo" -- no status message
        # is recorded for a malformed id.
        if len(repo_parts) == 2:
            owner, repo = repo_parts
            # We need to pass the full markdown for creation. Let's build it from the plan.
            # This is a simplification; a more robust solution would pass the planned files directly.
            # For now, we assume the AI provides file content for the new space.

            planned_files_md = [f"# Space: {create_space_op['repo_id']}"]
            for change in changeset:
                if change['type'] in ['CREATE_FILE', 'UPDATE_FILE']:
                    # NOTE(review): 'bbb' is presumably a module-level constant
                    # holding a triple-backtick fence (not visible in this
                    # chunk) -- confirm it is defined.
                    planned_files_md.append(f"### File: {change['path']}\n{bbb}{change.get('lang', 'plaintext')}\n{change['content']}\n{bbb}")

            markdown_for_creation = "\n\n".join(planned_files_md)

            result = build_logic_create_space(
                ui_api_token_from_textbox=hf_api_key,
                space_name_ui=repo,
                owner_ui=owner,
                sdk_ui=create_space_op['sdk'],
                private=create_space_op['private'],
                markdown_input=markdown_for_creation
            )
            status_messages.append(f"CREATE_SPACE: {result}")
            if "Error" in result:
                # Stop if space creation failed
                final_status = " | ".join(status_messages)
                return final_status, gr.update(), gr.update(), gr.update(), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), []

    # Apply all other changes
    for change in changeset:
        try:
            if change['type'] == 'UPDATE_FILE':
                msg = update_space_file(hf_api_key, space_name, owner_name, change['path'], change['content'], f"AI: Update {change['path']}")
                status_messages.append(f"UPDATE '{change['path']}': {msg}")
                if "Success" in msg:
                    # Update cache on success
                    for block in parsed_code_blocks_state_cache:
                        if block['filename'] == change['path']:
                            block['code'] = change['content']
                            break
            elif change['type'] == 'CREATE_FILE' and not create_space_op: # Don't re-create if handled by CREATE_SPACE
                msg = update_space_file(hf_api_key, space_name, owner_name, change['path'], change['content'], f"AI: Create {change['path']}")
                status_messages.append(f"CREATE '{change['path']}': {msg}")
                if "Success" in msg:
                    parsed_code_blocks_state_cache.append({'filename': change['path'], 'code': change['content'], 'language': change['lang'], 'is_binary': False})
            elif change['type'] == 'DELETE_FILE':
                msg = build_logic_delete_space_file(hf_api_key, space_name, owner_name, change['path'])
                status_messages.append(f"DELETE '{change['path']}': {msg}")
                if "Success" in msg:
                    parsed_code_blocks_state_cache = [b for b in parsed_code_blocks_state_cache if b["filename"] != change['path']]
            elif change['type'] == 'SET_PRIVACY':
                # NOTE(review): elsewhere in this file the privacy helper is
                # called with private= as a keyword; here it is positional --
                # confirm build_logic_set_space_privacy's signature accepts this.
                msg = build_logic_set_space_privacy(hf_api_key, change['repo_id'], change['private'])
                status_messages.append(f"SET_PRIVACY: {msg}")
            elif change['type'] == 'DELETE_SPACE':
                msg = build_logic_delete_space(hf_api_key, change['owner'], change['space_name'])
                status_messages.append(f"DELETE_SPACE: {msg}")
                if "Success" in msg:
                    parsed_code_blocks_state_cache = [] # Clear everything
        except Exception as e:
            # Best-effort loop: record the failure and keep applying the rest.
            status_messages.append(f"Error applying {change['type']} for {change.get('path', '')}: {e}")

    final_status = " | ".join(status_messages)
    _formatted, _detected, _download = _generate_ui_outputs_from_cache(owner_name, space_name)

    # Hide the confirmation UI and clear the state
    return final_status, _formatted, _detected, _download, gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), []
552
 
553
def handle_cancel_changes():
    """Discard the pending changeset and collapse the confirmation panel."""
    status_msg = "Changes cancelled."
    cleared_changeset = []
    summary_reset = gr.update(value="*No changes proposed.*")
    return (
        status_msg,
        cleared_changeset,
        summary_reset,
        gr.update(visible=False),
        gr.update(visible=False),
        gr.update(visible=False),
    )
556
 
557
  def update_models_dropdown(provider_select):
558
  if not provider_select: return gr.update(choices=[], value=None)
 
685
  for key, val in status.items():
686
  md += f"- **{key.replace('_', ' ').title()}:** `{val}`\n"
687
  return md
 
 
 
 
 
 
 
 
 
688
 
689
# --- UI Theming and CSS (Unchanged) ---
# Base theme: teal primary / purple secondary on a zinc neutral palette.
custom_theme = gr.themes.Base(primary_hue="teal", secondary_hue="purple", neutral_hue="zinc", text_size="sm", spacing_size="md", radius_size="sm", font=["System UI", "sans-serif"])
# Dark blue-grey gradient skin layered over the theme via raw CSS.
# NOTE(review): these selectors target Gradio's generated class names
# (.gr-box, .gr-chatbot, ...) and may break across Gradio version bumps.
custom_css = """
body { background: linear-gradient(to bottom right, #2c3e50, #34495e); color: #ecf0f1; }
.gradio-container { background: transparent !important; }
.gr-box, .gr-panel, .gr-pill { background-color: rgba(44, 62, 80, 0.8) !important; border-color: rgba(189, 195, 199, 0.2) !important; }
.gr-textbox, .gr-dropdown, .gr-button, .gr-code, .gr-chat-message { border-color: rgba(189, 195, 199, 0.3) !important; background-color: rgba(52, 73, 94, 0.9) !important; color: #ecf0f1 !important; }
.gr-button.gr-button-primary { background-color: #1abc9c !important; color: white !important; border-color: #16a085 !important; }
.gr-button.gr-button-secondary { background-color: #9b59b6 !important; color: white !important; border-color: #8e44ad !important; }
.gr-button.gr-button-stop { background-color: #e74c3c !important; color: white !important; border-color: #c0392b !important; }
.gr-markdown { background-color: rgba(44, 62, 80, 0.7) !important; padding: 10px; border-radius: 5px; }
.gr-markdown h1, .gr-markdown h2, .gr-markdown h3, .gr-markdown h4, .gr-markdown h5, .gr-markdown h6 { color: #ecf0f1 !important; border-bottom-color: rgba(189, 195, 199, 0.3) !important; }
.gr-markdown pre code { background-color: rgba(52, 73, 94, 0.95) !important; border-color: rgba(189, 195, 199, 0.3) !important; }
.gr-chatbot { background-color: rgba(44, 62, 80, 0.7) !important; border-color: rgba(189, 195, 199, 0.2) !important; }
.gr-chatbot .message { background-color: rgba(52, 73, 94, 0.9) !important; color: #ecf0f1 !important; border-color: rgba(189, 195, 199, 0.3) !important; }
.gr-chatbot .message.user { background-color: rgba(46, 204, 113, 0.9) !important; color: black !important; }
"""
706
 
707
  # --- Gradio UI Definition ---
 
708
  with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
709
+ # --- NEW: State to hold the plan ---
710
+ changeset_state = gr.State([])
711
+
712
  gr.Markdown("# πŸ€– AI-Powered Hugging Face Space Builder")
713
  gr.Markdown("Use an AI assistant to create, modify, build, and manage your Hugging Face Spaces directly from this interface.")
714
 
 
733
  send_chat_button = gr.Button("Send", variant="primary", scale=1)
734
  status_output = gr.Textbox(label="Last Action Status", interactive=False, value="Ready.")
735
 
736
+ # --- NEW: Confirmation Accordion ---
737
+ with gr.Accordion("πŸ“ Proposed Changes (Pending Confirmation)", visible=False) as confirm_accordion:
738
+ changeset_display = gr.Markdown("No changes proposed.")
739
+ with gr.Row():
740
+ confirm_button = gr.Button("βœ… Confirm & Apply Changes", variant="primary", visible=False)
741
+ cancel_button = gr.Button("❌ Cancel", variant="stop", visible=False)
742
+
743
  with gr.Tabs():
744
  with gr.TabItem("πŸ“ Generated Markdown & Build"):
745
  with gr.Row():
 
750
  gr.Markdown("### Build Controls")
751
  space_sdk_select = gr.Dropdown(label="Space SDK", choices=["gradio", "streamlit", "docker", "static"], value="gradio")
752
  space_private_checkbox = gr.Checkbox(label="Make Space Private", value=False)
753
+ build_space_button = gr.Button("πŸš€ Build / Update Space from Manual Edit", variant="primary")
754
  build_status_display = gr.Textbox(label="Build Operation Status", interactive=False)
755
  refresh_status_button = gr.Button("πŸ”„ Refresh Runtime Status")
756
  space_runtime_status_display = gr.Markdown("*Runtime status will appear here.*")
 
776
  # --- Event Listeners ---
777
  provider_select.change(update_models_dropdown, inputs=provider_select, outputs=model_select)
778
 
779
+ chat_inputs = [chat_message_input, chatbot_display, hf_api_key_input, provider_select, model_select, system_prompt_input, owner_name_input, space_name_input]
780
+ chat_outputs = [
781
+ chat_message_input, chatbot_display, status_output,
782
+ detected_files_preview, formatted_space_output_display, download_button,
783
+ changeset_state, changeset_display, confirm_accordion, confirm_button, cancel_button
784
+ ]
785
  send_chat_button.click(handle_chat_submit, inputs=chat_inputs, outputs=chat_outputs)
786
  chat_message_input.submit(handle_chat_submit, inputs=chat_inputs, outputs=chat_outputs)
787
 
788
+ # --- NEW: Confirmation Button Listeners ---
789
+ confirm_inputs = [hf_api_key_input, owner_name_input, space_name_input, changeset_state]
790
+ confirm_outputs = [
791
+ status_output, formatted_space_output_display, detected_files_preview, download_button,
792
+ confirm_accordion, confirm_button, cancel_button, changeset_state
793
+ ]
794
+ confirm_button.click(handle_confirm_changes, inputs=confirm_inputs, outputs=confirm_outputs)
795
+
796
+ cancel_outputs = [
797
+ status_output, changeset_state, changeset_display,
798
+ confirm_accordion, confirm_button, cancel_button
799
+ ]
800
+ cancel_button.click(handle_cancel_changes, inputs=None, outputs=cancel_outputs)
801
+
802
+
803
  load_space_outputs = [formatted_space_output_display, detected_files_preview, status_output, file_browser_dropdown, owner_name_input, space_name_input, space_iframe_display, download_button, build_status_display, edit_status_display, space_runtime_status_display, chatbot_display]
804
  load_space_button.click(fn=handle_load_existing_space, inputs=[hf_api_key_input, owner_name_input, space_name_input], outputs=load_space_outputs)
805