Update app.py
app.py (CHANGED)
@@ -1614,11 +1614,6 @@ def main():
         st.session_state.custom_model = "gpt-4o"  # Default model
         st.session_state.first_load_complete = False  # Prevent refreshes on first load
         st.session_state.pending_tab_switch = None  # Track pending tab switches
-
-        # New chat-related session state variables
-        st.session_state.chat_history = []  # Store chat messages
-        st.session_state.user_message = ""  # Current user message
-        st.session_state.chat_export_data = None  # For exporting chat
 
     # Page configuration with improved layout
     st.set_page_config(
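Note: the assignments kept in the hunk above are Streamlit session-state defaults. A minimal sketch of the usual pattern around them, assuming an `if "custom_model" not in st.session_state:` guard that sits outside this hunk:

```python
import streamlit as st

# Streamlit reruns the whole script on every interaction, so defaults are set
# once per session behind a guard instead of being reassigned on each rerun.
if "custom_model" not in st.session_state:
    st.session_state.custom_model = "gpt-4o"        # Default model
    st.session_state.first_load_complete = False    # Prevent refreshes on first load
    st.session_state.pending_tab_switch = None      # Track pending tab switches
```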
@@ -1628,7 +1623,7 @@ def main():
         initial_sidebar_state="expanded"
     )
 
-    # Custom CSS for improved UI
+    # Custom CSS for improved UI
     st.markdown("""
     <style>
     .main-header {
@@ -1726,67 +1721,6 @@ def main():
         margin-top: 1rem;
         border-left: 4px solid #ef4444;
     }
-
-    /* Chat UI styles */
-    .chat-container {
-        display: flex;
-        flex-direction: column;
-        height: 500px;
-        overflow-y: auto;
-        border: 1px solid #e0e0e0;
-        border-radius: 10px;
-        padding: 1rem;
-        margin-bottom: 1rem;
-        background-color: #f8f9fa;
-    }
-
-    .user-message {
-        align-self: flex-end;
-        background-color: #4F46E5;
-        color: white;
-        border-radius: 18px 18px 0 18px;
-        padding: 0.8rem 1rem;
-        margin: 0.5rem 0;
-        max-width: 80%;
-    }
-
-    .ai-message {
-        align-self: flex-start;
-        background-color: #e0e0e0;
-        color: #333;
-        border-radius: 18px 18px 18px 0;
-        padding: 0.8rem 1rem;
-        margin: 0.5rem 0;
-        max-width: 80%;
-    }
-
-    .message-time {
-        font-size: 0.7rem;
-        color: #777;
-        margin-top: 0.2rem;
-    }
-
-    .chat-input {
-        display: flex;
-        margin-top: 1rem;
-    }
-
-    .chat-input input {
-        flex-grow: 1;
-        padding: 0.8rem;
-        border: 1px solid #ccc;
-        border-radius: 5px;
-        margin-right: 0.5rem;
-    }
-
-    .chat-input button {
-        padding: 0.8rem 1.5rem;
-        background-color: #4F46E5;
-        color: white;
-        border: none;
-        border-radius: 5px;
-        cursor: pointer;
-    }
     </style>
     """, unsafe_allow_html=True)
 
@@ -2043,7 +1977,7 @@ class MyScene(Scene):
             else:
                 st.success(st.session_state.status)
 
-    # AI ASSISTANT TAB
+    # AI ASSISTANT TAB
    with tabs[1]:
         st.markdown("### 🤖 AI Animation Assistant")
 
@@ -2083,7 +2017,7 @@ class MyScene(Scene):
                     messages=[
                         UserMessage("Hello, this is a connection test."),
                     ],
-                    max_tokens=
+                    max_tokens=1000000,
                     model=model_name
                 )
 
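Note: the connection test above goes through `client.complete(...)` from the azure-ai-inference SDK. A minimal, self-contained sketch of how such a client is typically constructed and exercised; the endpoint URL and the `GITHUB_TOKEN` variable are assumptions, not values taken from this commit:

```python
import os

from azure.ai.inference import ChatCompletionsClient
from azure.ai.inference.models import UserMessage
from azure.core.credentials import AzureKeyCredential

# Assumed endpoint and credential; substitute whatever app.py actually configures.
client = ChatCompletionsClient(
    endpoint="https://models.inference.ai.azure.com",
    credential=AzureKeyCredential(os.environ["GITHUB_TOKEN"]),
)

response = client.complete(
    messages=[UserMessage("Hello, this is a connection test.")],
    model="gpt-4o-mini",
    max_tokens=16,  # a few tokens are enough to verify connectivity
)
print(response.choices[0].message.content)
```

For a pure connectivity check a small `max_tokens` is sufficient; very large values such as `1000000` are typically rejected or clamped by the service.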
@@ -2118,7 +2052,7 @@ class MyScene(Scene):
                 "DeepSeek-R1",
                 "Meta-Llama-3.1-405B-Instruct",
                 "Llama-3.2-90B-Vision-Instruct",
-                "Llama-3.3-70B-Instruct"
+                "Llama-3.3-70B-Instruct"
                 "Llama-4-Scout-17B-16E-Instruct",
                 "Llama-4-Maverick-17B-128E-Instruct-FP8",
                 "gpt-4o-mini",
@@ -2148,287 +2082,46 @@ class MyScene(Scene):
                 st.session_state.ai_models['model_name'] = st.session_state.custom_model
                 st.success(f"Model updated to {st.session_state.custom_model}")
 
-        #
+        # AI code generation
         if st.session_state.ai_models and "client" in st.session_state.ai_models:
-            st.markdown("
-            st.markdown("
-
-            # Chat history display container
-            st.markdown("<div class='chat-container' id='chat-container'>", unsafe_allow_html=True)
+            st.markdown("<div class='card'>", unsafe_allow_html=True)
+            st.markdown("#### Generate Animation from Description")
+            st.write("Describe the animation you want to create, or provide partial code to complete.")
 
-            #
-
-
-
-
-
-
-
-
-                )
+            # Predefined animation ideas dropdown
+            animation_ideas = [
+                "Select an idea...",
+                "Create a 3D animation showing a sphere morphing into a torus",
+                "Show a visual proof of the Pythagorean theorem",
+                "Visualize a Fourier transform converting a signal from time domain to frequency domain",
+                "Create an animation explaining neural network forward propagation",
+                "Illustrate the concept of integration with area under a curve"
+            ]
 
-
-
-
-
-                        f"<div class='user-message'>{message['content']}"
-                        f"<div class='message-time'>{message['time']}</div>"
-                        "</div>",
-                        unsafe_allow_html=True
-                    )
-                else:
-                    st.markdown(
-                        f"<div class='ai-message'>{message['content']}"
-                        f"<div class='message-time'>{message['time']}</div>"
-                        "</div>",
-                        unsafe_allow_html=True
-                    )
+            selected_idea = st.selectbox(
+                "Try one of these ideas",
+                options=animation_ideas
+            )
 
-
+            prompt_value = selected_idea if selected_idea != "Select an idea..." else ""
 
-
-
-
-
-
-
-                    "Your message",
-                    value=st.session_state.user_message,
-                    key="user_input_area",
-                    placeholder="Ask about animation concepts, request code examples, or describe what you want to create...",
-                    height=100
-                )
-
-                with col2:
-                    submit_button = st.form_submit_button("Send")
+            code_input = st.text_area(
+                "Your Prompt or Code",
+                value=prompt_value,
+                placeholder="Example: Create an animation that shows a circle morphing into a square while changing color from red to blue",
+                height=150
+            )
 
-
-
-
-
-
-
-
-                    "time": current_time
-                })
-
-                # Get AI response
-                with st.spinner("AI is thinking..."):
-                    try:
-                        client = st.session_state.ai_models["client"]
-                        model_name = st.session_state.ai_models["model_name"]
-
-                        # Convert chat history to API format
-                        from azure.ai.inference.models import UserMessage, AssistantMessage
-
-                        # Create messages array for API
-                        messages = []
-
-                        # Add system message with Manim context
-                        system_message = """
-                        I am a Manim Animation Assistant. I can help with:
-                        1. Creating mathematical animations using Manim
-                        2. Explaining animation concepts and techniques
-                        3. Generating and debugging Manim code
-                        4. Offering creative suggestions for visualizing concepts
-
-                        When providing code, I'll make sure it's complete, runnable Manim code with proper imports and structure.
-                        """
-
-                        # Add system message as first user message (workaround for some models)
-                        messages.append(UserMessage("You are a Manim animation expert assistant. Respond to my questions with helpful information about creating mathematical animations with Manim."))
-
-                        # Add chat history (limited to last 10 messages for context window)
-                        for msg in st.session_state.chat_history[-10:]:
-                            if msg["role"] == "user":
-                                messages.append(UserMessage(msg["content"]))
-                            else:
-                                messages.append(AssistantMessage(msg["content"]))
-
-                        # Get response from API
-                        response = client.complete(
-                            messages=messages,
-                            max_tokens=1000,
-                            model=model_name
-                        )
-
-                        # Process the response
-                        if response and response.choices and len(response.choices) > 0:
-                            ai_response = response.choices[0].message.content
-
-                            # Store AI response in history
-                            st.session_state.chat_history.append({
-                                "role": "assistant",
-                                "content": ai_response,
-                                "time": datetime.now().strftime("%H:%M:%S")
-                            })
+            if st.button("Generate Animation Code", key="gen_ai_code"):
+                if code_input:
+                    with st.spinner("AI is generating your animation code..."):
+                        try:
+                            # Direct implementation of code generation
+                            client = st.session_state.ai_models["client"]
+                            model_name = st.session_state.ai_models["model_name"]
 
-                            #
-
-                            # Extract code blocks
-                            code_blocks = []
-                            parts = ai_response.split("```python")
-                            for part in parts[1:]:  # Skip the first part (before first code block)
-                                if "```" in part:
-                                    code = part.split("```")[0].strip()
-                                    code_blocks.append(code)
-
-                            # If we have code blocks, offer to use the first one
-                            if code_blocks and "class" in code_blocks[0] and "Scene" in code_blocks[0]:
-                                st.info("Manim code detected in the response. Would you like to use it?")
-                                if st.button("Use This Code in Editor", key="use_chat_code"):
-                                    st.session_state.code = code_blocks[0]
-                                    st.session_state.temp_code = code_blocks[0]
-                                    st.session_state.pending_tab_switch = 0  # Switch to editor tab
-                                    st.rerun()
-                        else:
-                            # Handle empty response
-                            st.error("The AI model returned an empty response. Please try again.")
-                    except Exception as e:
-                        st.error(f"Error getting AI response: {str(e)}")
-                        import traceback
-                        st.code(traceback.format_exc())
-
-                        # Add error message to chat
-                        st.session_state.chat_history.append({
-                            "role": "assistant",
-                            "content": f"Sorry, I encountered an error: {str(e)}. Please try again or check the connection.",
-                            "time": datetime.now().strftime("%H:%M:%S")
-                        })
-
-                # Clear the user input
-                st.session_state.user_message = ""
-                # Force a rerun to update the chat display
-                st.rerun()
-
-            # Chat export options
-            if st.session_state.chat_history:
-                st.markdown("#### Chat Options")
-                col1, col2 = st.columns(2)
-
-                with col1:
-                    if st.button("Export Chat", key="export_chat_btn"):
-                        # Format chat history as text
-                        chat_text = "# Manim Animation Chat Export\n\n"
-                        for msg in st.session_state.chat_history:
-                            role = "You" if msg["role"] == "user" else "AI Assistant"
-                            chat_text += f"## {role} ({msg['time']})\n\n{msg['content']}\n\n"
-
-                        # Store in session state for download
-                        st.session_state.chat_export_data = chat_text
-
-                        # Show download button
-                        st.download_button(
-                            "📥 Download Chat Text",
-                            data=st.session_state.chat_export_data,
-                            file_name=f"manim_chat_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md",
-                            mime="text/markdown"
-                        )
-
-                with col2:
-                    if st.button("Clear Chat", key="clear_chat_btn"):
-                        # Clear chat history
-                        st.session_state.chat_history = []
-                        st.success("Chat history cleared!")
-                        st.rerun()
-
-            # Additional AI features (code generation, etc.)
-            with st.expander("✨ Animation Idea Generator"):
-                st.markdown("Let the AI help you generate animation ideas based on concepts.")
-
-                concept_input = st.text_input(
-                    "Enter a mathematical or scientific concept:",
-                    placeholder="e.g., Pythagorean theorem, Wave interference, Neural networks"
-                )
-
-                if st.button("Generate Animation Ideas", key="gen_ideas_btn"):
-                    if concept_input:
-                        with st.spinner("Generating ideas..."):
-                            try:
-                                # Get ideas from AI
-                                client = st.session_state.ai_models["client"]
-                                model_name = st.session_state.ai_models["model_name"]
-
-                                idea_prompt = f"""Generate 3 creative animation ideas for visualizing the concept of "{concept_input}" using Manim.
-For each idea, provide:
-1. A title
-2. A brief description of what the animation would show
-3. Key visual elements to include
-
-Format your response with clear sections for each idea."""
-
-                                from azure.ai.inference.models import UserMessage
-                                response = client.complete(
-                                    messages=[
-                                        UserMessage(idea_prompt),
-                                    ],
-                                    max_tokens=800,
-                                    model=model_name
-                                )
-
-                                if response and response.choices and len(response.choices) > 0:
-                                    ideas = response.choices[0].message.content
-                                    st.markdown("### Animation Ideas")
-                                    st.markdown(ideas)
-
-                                    # Add to chat history
-                                    st.session_state.chat_history.append({
-                                        "role": "user",
-                                        "content": f"Generate animation ideas for: {concept_input}",
-                                        "time": datetime.now().strftime("%H:%M:%S")
-                                    })
-
-                                    st.session_state.chat_history.append({
-                                        "role": "assistant",
-                                        "content": ideas,
-                                        "time": datetime.now().strftime("%H:%M:%S")
-                                    })
-                                else:
-                                    st.error("Failed to generate ideas. The AI returned an empty response.")
-                            except Exception as e:
-                                st.error(f"Error generating ideas: {str(e)}")
-                    else:
-                        st.warning("Please enter a concept first")
-
-            # AI code generation
-            with st.expander("🧩 Quick Code Generator"):
-                st.markdown("#### Generate Animation from Description")
-                st.write("Describe the animation you want to create, or provide partial code to complete.")
-
-                # Predefined animation ideas dropdown
-                animation_ideas = [
-                    "Select an idea...",
-                    "Create a 3D animation showing a sphere morphing into a torus",
-                    "Show a visual proof of the Pythagorean theorem",
-                    "Visualize a Fourier transform converting a signal from time domain to frequency domain",
-                    "Create an animation explaining neural network forward propagation",
-                    "Illustrate the concept of integration with area under a curve"
-                ]
-
-                selected_idea = st.selectbox(
-                    "Try one of these ideas",
-                    options=animation_ideas
-                )
-
-                prompt_value = selected_idea if selected_idea != "Select an idea..." else ""
-
-                code_input = st.text_area(
-                    "Your Prompt or Code",
-                    value=prompt_value,
-                    placeholder="Example: Create an animation that shows a circle morphing into a square while changing color from red to blue",
-                    height=120
-                )
-
-                if st.button("Generate Animation Code", key="gen_ai_code"):
-                    if code_input:
-                        with st.spinner("AI is generating your animation code..."):
-                            try:
-                                # Direct implementation of code generation
-                                client = st.session_state.ai_models["client"]
-                                model_name = st.session_state.ai_models["model_name"]
-
-                                # Create the prompt
-                                prompt = f"""Write a complete Manim animation scene based on this code or idea:
+                            # Create the prompt
+                            prompt = f"""Write a complete Manim animation scene based on this code or idea:
 {code_input}
 
 The code should be a complete, working Manim animation that includes:
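Note: both the removed chat handler and the generator that replaces it pull runnable code out of model replies by splitting on the Python code fences. A standalone sketch of the same idea; `extract_python_blocks` is an illustrative helper, not a function in app.py:

```python
import re

def extract_python_blocks(markdown_text: str) -> list[str]:
    # Equivalent to the split-based logic in the app: collect everything between
    # a ```python fence and the next closing fence.
    return [block.strip() for block in re.findall(r"```python\s*(.*?)```", markdown_text, re.DOTALL)]

reply = "Here is the scene:\n```python\nfrom manim import *\n\nclass Demo(Scene):\n    def construct(self):\n        self.play(Create(Circle()))\n```\nEnjoy!"
print(extract_python_blocks(reply)[0])
```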
@@ -2439,93 +2132,84 @@ The code should be a complete, working Manim animation that includes:
 
 Here's the complete Manim code:
 """
+
+                            # Make API call directly
+                            from azure.ai.inference.models import UserMessage
+                            response = client.complete(
+                                messages=[
+                                    UserMessage(prompt),
+                                ],
+                                max_tokens=1000,
+                                model=model_name
+                            )
+
+                            # Process the response
+                            if response and response.choices and len(response.choices) > 0:
+                                completed_code = response.choices[0].message.content
 
-                                #
-
-
-
-
-                                    ],
-                                    max_tokens=1000,
-                                    model=model_name
-                                )
+                                # Extract code from markdown if present
+                                if "```python" in completed_code:
+                                    completed_code = completed_code.split("```python")[1].split("```")[0]
+                                elif "```" in completed_code:
+                                    completed_code = completed_code.split("```")[1].split("```")[0]
 
-                                #
-                                if
-                                    completed_code =
-
-                                    # Extract code from markdown if present
-                                    if "```python" in completed_code:
-                                        completed_code = completed_code.split("```python")[1].split("```")[0]
-                                    elif "```" in completed_code:
-                                        completed_code = completed_code.split("```")[1].split("```")[0]
-
-                                    # Add Scene class if missing
-                                    if "Scene" not in completed_code:
-                                        completed_code = f"""from manim import *
+                                # Add Scene class if missing
+                                if "Scene" not in completed_code:
+                                    completed_code = f"""from manim import *
 
 class MyScene(Scene):
     def construct(self):
         {completed_code}"""
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                                st.error(f"Error generating code: {str(e)}")
-                                import traceback
-                                st.code(traceback.format_exc())
-                    else:
-                        st.warning("Please enter a description or prompt first")
+
+                                # Store the generated code
+                                st.session_state.generated_code = completed_code
+                            else:
+                                st.error("Failed to generate code. API returned an empty response.")
+                        except Exception as e:
+                            st.error(f"Error generating code: {str(e)}")
+                            import traceback
+                            st.code(traceback.format_exc())
+                else:
+                    st.warning("Please enter a description or prompt first")
+
+            st.markdown("</div>", unsafe_allow_html=True)
+
+            # AI generated code display and actions
+            if "generated_code" in st.session_state and st.session_state.generated_code:
+                st.markdown("<div class='card'>", unsafe_allow_html=True)
+                st.markdown("#### Generated Animation Code")
+                st.code(st.session_state.generated_code, language="python")
 
-
-
-                    st.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                col_ai1, col_ai2 = st.columns(2)
+                with col_ai1:
+                    if st.button("Use This Code", key="use_gen_code"):
+                        st.session_state.code = st.session_state.generated_code
+                        st.session_state.temp_code = st.session_state.generated_code
+                        # Set pending tab switch to editor tab
+                        st.session_state.pending_tab_switch = 0
+                        st.rerun()
+
+                with col_ai2:
+                    if st.button("Render Preview", key="render_preview"):
+                        with st.spinner("Rendering preview..."):
+                            video_data, status = generate_manim_video(
+                                st.session_state.generated_code,
+                                "mp4",
+                                "480p",  # Use lowest quality for preview
+                                ANIMATION_SPEEDS["Normal"]
+                            )
+
+                            if video_data:
+                                st.video(video_data)
+                                st.download_button(
+                                    label="Download Preview",
+                                    data=video_data,
+                                    file_name=f"manim_preview_{int(time.time())}.mp4",
+                                    mime="video/mp4"
                                 )
-
-
-
-                                    st.download_button(
-                                        label="Download Preview",
-                                        data=video_data,
-                                        file_name=f"manim_preview_{int(time.time())}.mp4",
-                                        mime="video/mp4"
-                                    )
-                                else:
-                                    st.error(f"Failed to generate preview: {status}")
+                            else:
+                                st.error(f"Failed to generate preview: {status}")
+                st.markdown("</div>", unsafe_allow_html=True)
         else:
             st.warning("AI models not initialized. Please use the Debug Connection section to test API connectivity.")
     else:
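Note: the generator added above wraps the model's reply in a Scene subclass whenever the string "Scene" is absent, interpolating the snippet into an f-string as-is. A sketch of the same fallback with explicit re-indentation; the helper and the textwrap handling are assumptions rather than the app's code:

```python
import textwrap

def ensure_scene(code: str, scene_name: str = "MyScene") -> str:
    # Leave complete scenes untouched; only wrap bare construct-body snippets.
    if "Scene" in code:
        return code
    body = textwrap.indent(code.strip(), " " * 8)  # align with construct()'s body
    return (
        "from manim import *\n\n"
        f"class {scene_name}(Scene):\n"
        "    def construct(self):\n"
        f"{body}\n"
    )

print(ensure_scene("circle = Circle()\nself.play(Create(circle))"))
```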
@@ -3363,6 +3047,4 @@ class PlotScene(Scene):
         st.session_state.first_load_complete = True
 
 if __name__ == "__main__":
-    main()
-
-
+    main()
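Note: the "Render Preview" button added above relies on the app's internal `generate_manim_video(code, format, quality, speed)` helper, whose implementation is outside this diff. A rough sketch of what such a helper commonly does with the Manim Community CLI; the file layout, flags, and subprocess approach are assumptions:

```python
import pathlib
import subprocess
import tempfile

def render_preview(code: str, scene: str = "MyScene") -> bytes:
    # Write the generated scene to a temporary file, render a low-quality MP4,
    # and return the raw bytes for st.video / st.download_button.
    with tempfile.TemporaryDirectory() as tmp:
        script = pathlib.Path(tmp) / "scene.py"
        script.write_text(code)
        subprocess.run(
            ["manim", str(script), scene, "-ql", "--media_dir", tmp],
            check=True,
        )
        return next(pathlib.Path(tmp).rglob("*.mp4")).read_bytes()
```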