Commit
Β·
81e730f
1
Parent(s):
4960b47
fix: Finalize LLM path for deployment
Browse files- core/creative_chat.py +11 -8
core/creative_chat.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
|
|
| 1 |
import os
|
| 2 |
import sys
|
| 3 |
from llama_cpp import Llama
|
|
@@ -5,6 +6,7 @@ import json
|
|
| 5 |
import re
|
| 6 |
|
| 7 |
# Path setup to import VectorStore from the parent directory
|
|
|
|
| 8 |
current_dir = os.path.dirname(os.path.abspath(__file__))
|
| 9 |
parent_dir = os.path.dirname(current_dir)
|
| 10 |
sys.path.append(parent_dir)
|
|
@@ -15,11 +17,17 @@ class CreativeDirector:
|
|
| 15 |
def __init__(self):
|
| 16 |
"""Initialize Model and Memory once to save time."""
|
| 17 |
|
| 18 |
-
#
|
| 19 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 20 |
|
| 21 |
if not os.path.exists(model_path):
|
| 22 |
-
|
|
|
|
| 23 |
|
| 24 |
print("🧠 Loading AI Director (TinyLlama - SUPER FAST MODE)...")
|
| 25 |
|
|
@@ -72,7 +80,6 @@ Response:"""
|
|
| 72 |
|
| 73 |
conversation_summary = "\n".join([f"- {msg['content']}" for msg in history[-3:]])
|
| 74 |
|
| 75 |
-
# ✅ PROMPT: Ask for plain text with specific labels
|
| 76 |
prompt = f"""Instruction: Create a video script for "{task_context}".
|
| 77 |
Chat Summary: {conversation_summary}
|
| 78 |
|
|
@@ -96,14 +103,11 @@ Response:"""
|
|
| 96 |
raw_text = response['choices'][0]['text'].strip()
|
| 97 |
print(f" - 🤖 Raw Text: {raw_text}")
|
| 98 |
|
| 99 |
-
# ✅ ROBUST PARSING (Regex)
|
| 100 |
-
# Search the text for the 'HOOK:', 'SCRIPT:' labels and extract their contents
|
| 101 |
hook_match = re.search(r'HOOK:\s*(.*?)(?=\nSCRIPT:)', raw_text, re.DOTALL | re.IGNORECASE)
|
| 102 |
script_match = re.search(r'SCRIPT:\s*(.*?)(?=\nVISUALS:)', raw_text, re.DOTALL | re.IGNORECASE)
|
| 103 |
visuals_match = re.search(r'VISUALS:\s*(.*?)(?=\nTOOLS:)', raw_text, re.DOTALL | re.IGNORECASE)
|
| 104 |
tools_match = re.search(r'TOOLS:\s*(.*)', raw_text, re.DOTALL | re.IGNORECASE)
|
| 105 |
|
| 106 |
-
# If no match is found, put the entire raw text into the script field
|
| 107 |
return {
|
| 108 |
"hook": hook_match.group(1).strip() if hook_match else "Start with a bang!",
|
| 109 |
"script": script_match.group(1).strip() if script_match else raw_text,
|
|
@@ -113,7 +117,6 @@ Response:"""
|
|
| 113 |
|
| 114 |
except Exception as e:
|
| 115 |
print(f" - ❌ Final Plan Generation Error: {e}")
|
| 116 |
-
# Fallback JSON
|
| 117 |
return {
|
| 118 |
"hook": "Error generating plan.",
|
| 119 |
"script": "Please try again later.",
|
|
|
|
| 1 |
+
|
| 2 |
import os
|
| 3 |
import sys
|
| 4 |
from llama_cpp import Llama
|
|
|
|
| 6 |
import re
|
| 7 |
|
| 8 |
# Path setup to import VectorStore from the parent directory
|
| 9 |
+
# This logic is correct and remains the same
|
| 10 |
current_dir = os.path.dirname(os.path.abspath(__file__))
|
| 11 |
parent_dir = os.path.dirname(current_dir)
|
| 12 |
sys.path.append(parent_dir)
|
|
|
|
| 17 |
def __init__(self):
|
| 18 |
"""Initialize Model and Memory once to save time."""
|
| 19 |
|
| 20 |
+
# ✅ THE FIX IS HERE: We now look for the model in the writable directory
|
| 21 |
+
# where main.py downloads it, not in the read-only application directory.
|
| 22 |
+
model_name = "tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf"
|
| 23 |
+
# Hugging Face Spaces provides '/data' as a writable persistent directory.
|
| 24 |
+
writable_dir = os.environ.get("WRITABLE_DIR", "/data")
|
| 25 |
+
model_path = os.path.join(writable_dir, "llm_model", model_name)
|
| 26 |
+
# =====================================================================
|
| 27 |
|
| 28 |
if not os.path.exists(model_path):
|
| 29 |
+
# This error will now correctly point to the writable directory
|
| 30 |
+
raise FileNotFoundError(f"❌ Model not found at: {model_path}. Please check the download logic in main.py.")
|
| 31 |
|
| 32 |
print("🧠 Loading AI Director (TinyLlama - SUPER FAST MODE)...")
|
| 33 |
|
|
|
|
| 80 |
|
| 81 |
conversation_summary = "\n".join([f"- {msg['content']}" for msg in history[-3:]])
|
| 82 |
|
|
|
|
| 83 |
prompt = f"""Instruction: Create a video script for "{task_context}".
|
| 84 |
Chat Summary: {conversation_summary}
|
| 85 |
|
|
|
|
| 103 |
raw_text = response['choices'][0]['text'].strip()
|
| 104 |
print(f" - 🤖 Raw Text: {raw_text}")
|
| 105 |
|
|
|
|
|
|
|
| 106 |
hook_match = re.search(r'HOOK:\s*(.*?)(?=\nSCRIPT:)', raw_text, re.DOTALL | re.IGNORECASE)
|
| 107 |
script_match = re.search(r'SCRIPT:\s*(.*?)(?=\nVISUALS:)', raw_text, re.DOTALL | re.IGNORECASE)
|
| 108 |
visuals_match = re.search(r'VISUALS:\s*(.*?)(?=\nTOOLS:)', raw_text, re.DOTALL | re.IGNORECASE)
|
| 109 |
tools_match = re.search(r'TOOLS:\s*(.*)', raw_text, re.DOTALL | re.IGNORECASE)
|
| 110 |
|
|
|
|
| 111 |
return {
|
| 112 |
"hook": hook_match.group(1).strip() if hook_match else "Start with a bang!",
|
| 113 |
"script": script_match.group(1).strip() if script_match else raw_text,
|
|
|
|
| 117 |
|
| 118 |
except Exception as e:
|
| 119 |
print(f" - ❌ Final Plan Generation Error: {e}")
|
|
|
|
| 120 |
return {
|
| 121 |
"hook": "Error generating plan.",
|
| 122 |
"script": "Please try again later.",
|