File size: 14,148 Bytes
14ad967
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
import asyncio
import json
import os
import re
import subprocess
import sys
import tempfile
import time
import uuid

import streamlit as st
from streamlit_ace import st_ace
from langchain_core.messages import SystemMessage, AIMessage, HumanMessage, ToolMessage

from code_assistant_runnable import get_runnable

# Set up page configuration (first Streamlit call in the script).
st.set_page_config(page_title="AI Code Editor", 
                   page_icon=":computer:", 
                   layout="wide")

@st.cache_resource
def create_code_assistant_instance():
    """Build the code-assistant runnable once and cache it for the session.

    Returns:
        The runnable produced by ``get_runnable()``, or ``None`` when
        initialization fails (the error is surfaced in the UI via st.error).
    """
    try:
        # Import torch up front so it initializes before the runnable is built.
        import torch
        # Inference-only app: gradients are never needed.
        torch.set_grad_enabled(False)
        runnable = get_runnable()
    except Exception as e:
        st.error(f"Error initializing chatbot: {str(e)}")
        return None
    return runnable

# Shared assistant runnable (cached); may be None if initialization failed.
chatbot = create_code_assistant_instance()


# Initialize session states
if 'messages' not in st.session_state:
    # Seed the chat history with a greeting so the chat pane is never empty.
    st.session_state.messages = [
        AIMessage(content="Hello, I am your coding assistant. How can I help you?"),
    ]



if 'editor_code' not in st.session_state:
    # Editor starts empty; updated later by the auto-complete flow.
    st.session_state.editor_code = ''

# Constants
EDITOR_HEIGHT = 400  # pixels; height of the ace editor widget
OUTPUT_HEIGHT = 150  # pixels; NOTE(review): not referenced below — confirm it is still needed

# Minimal CSS for styling the output area, chat messages, section headers,
# and the loading spinner. Injected as a raw <style> tag, which requires
# unsafe_allow_html=True.
st.markdown("""
    <style type="text/css">
        .output-container {
            background-color: rgba(17, 19, 23, 0.8);
            border-radius: 4px;
            padding: 1rem;
            margin-top: 0.5rem;
            min-height: 150px;
            color: white;
        }
        
        .placeholder-text {
            color: gray;
            font-style: italic;
        }
        
        /* Remove extra padding */
        .block-container {
            padding-top: 1rem !important;
        }
        
        /* Ensure chat messages are visible */
        .stChatMessage {
            background-color: rgba(17, 19, 23, 0.8) !important;
        }

        /* Style section headers consistently */
        .section-header {
            font-size: 1rem;
            margin-bottom: 1rem;
            color: rgb(250, 250, 250);
            font-weight: 500;
        }

        /* Ensure columns align at the top */
        .column-container {
            display: flex;
            align-items: flex-start;
        }
            
        /* Loading indicator styles */
        .loading-spinner {
            display: flex;
            align-items: center;
            gap: 0.5rem;
            padding: 0.5rem;
            border-radius: 0.25rem;
            background-color: rgba(17, 19, 23, 0.8);
        }
        
        .loading-text {
            color: #ffffff;
            font-size: 0.875rem;
        }
    </style>
""", unsafe_allow_html=True)


def analyze_code(code, language):
    """Run lightweight heuristic checks on *code* and report findings.

    Args:
        code: Source text from the editor.
        language: Language key from the sidebar ("python", "rust", ...).

    Returns:
        A newline-joined string of warnings/hints, or a success message
        when no heuristic fires.
    """
    analysis = []
    
    if language == "python":
        # Check for basic Python issues
        if "while" in code and "break" not in code:
            analysis.append("⚠️ While loop detected without break condition - check for infinite loops")
        
        # Match an actual bare `except:` clause. The previous substring test
        # ("except:" in code and "except Exception:" not in code) could not
        # distinguish a bare clause reliably when both forms were present.
        if re.search(r'^\s*except\s*:', code, re.MULTILINE):
            analysis.append("⚠️ Bare except clause detected - consider catching specific exceptions")
        
        if "print" in code and "if __name__ == '__main__':" not in code:
            analysis.append("💡 Consider adding main guard for scripts with print statements")
        
        # Flag genuinely mixed indentation: both tab-indented and
        # space-indented lines present. The previous pattern (r'^\s+')
        # matched ANY indented line, i.e. fired on all normal Python code.
        if re.search(r'^\t', code, re.MULTILINE) and re.search(r'^ ', code, re.MULTILINE):
            analysis.append("🔍 Mixed indentation detected - check spacing")
            
    elif language == "rust":
        if "unwrap()" in code:
            analysis.append("⚠️ Usage of unwrap() detected - consider proper error handling")
        
        if "mut" not in code and len(code) > 50:
            analysis.append("💡 No mutable variables detected - verify if intentional")

    if not analysis:
        analysis.append("✅ No immediate issues detected in the code")
    
    return "\n".join(analysis)

def dummy_ai_response(question, code_context, language):
    """Return a canned assistant reply chosen by simple keyword matching.

    Args:
        question: The user's chat prompt.
        code_context: Current editor contents, passed to analyze_code for
            "debug" questions.
        language: Language key from the sidebar.

    Returns:
        A formatted reply string.
    """
    time.sleep(1)  # Simulate processing time

    lowered = question.lower()

    # Debug questions get a heuristic code analysis of the editor contents.
    if "debug" in lowered:
        return f"Here's my analysis of your {language} code:\n" + analyze_code(code_context, language)

    # "How do I implement ..." questions get generic guidance.
    if "how" in lowered and "implement" in lowered:
        return (
            f"To implement this in {language}, you might want to consider:\n"
            "1. Breaking down the problem\n"
            "2. Using appropriate data structures\n"
            f"3. Following {language} best practices"
        )

    # Error reports get a request for reproduction details.
    if "error" in lowered or "not working" in lowered:
        return (
            "Let me help you debug that. Could you:\n"
            "1. Share the specific error message\n"
            "2. Describe what you expected to happen\n"
            "3. Describe what actually happened"
        )

    # Fallback: ask for clarification.
    return f"I see you're working with {language}. Could you clarify what specific help you need with your code?"

def run_python_code(code):
    """Execute Python source in a subprocess and return its output.

    Args:
        code: Python source text to run.

    Returns:
        The subprocess's stderr if any was produced, otherwise its stdout;
        or an "Error: ..." string when execution itself fails or times out.
    """
    tmp_path = None
    try:
        # Use a unique temp file instead of a fixed "temp_code.py" so
        # concurrent sessions cannot clobber each other's code.
        with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as f:
            f.write(code)
            tmp_path = f.name
        # sys.executable is the interpreter running this app; a bare
        # "python" may not exist on PATH (e.g. python3-only systems).
        result = subprocess.run([sys.executable, tmp_path],
                                capture_output=True,
                                text=True,
                                timeout=30)  # keep the UI from hanging on infinite loops
        return result.stderr if result.stderr else result.stdout
    except subprocess.TimeoutExpired:
        return "Error: code execution timed out after 30 seconds"
    except Exception as e:
        return f"Error: {e}"
    finally:
        # Always clean up the temp file (the original leaked temp_code.py).
        if tmp_path and os.path.exists(tmp_path):
            os.remove(tmp_path)

def run_rust_code(code):
    """Compile a Rust snippet with rustc and run the resulting binary.

    Args:
        code: Rust source text to compile and execute.

    Returns:
        Compiler errors prefixed with "Compilation Error:", the program's
        stderr if any, otherwise its stdout; or an "Error: ..." string when
        the toolchain is missing or execution times out.
    """
    try:
        with open('code.rs', 'w') as file:
            file.write(code)

        # subprocess.run replaces the Popen/communicate pair; timeout keeps
        # the UI responsive if rustc or the program hangs.
        compile_result = subprocess.run(['rustc', 'code.rs'],
                                        capture_output=True,
                                        text=True,
                                        timeout=60)
        if compile_result.returncode != 0:
            return f"Compilation Error: {compile_result.stderr}"

        run_result = subprocess.run(['./code'],
                                    capture_output=True,
                                    text=True,
                                    timeout=30)
        return run_result.stdout if not run_result.stderr else run_result.stderr
    except subprocess.TimeoutExpired:
        return "Error: execution timed out"
    except Exception as e:
        return f"Error: {e}"
    finally:
        # Remove the source and binary artifacts the original left behind.
        # NOTE(review): on Windows rustc emits code.exe — confirm if support
        # for non-POSIX hosts is needed.
        for artifact in ('code.rs', 'code'):
            if os.path.exists(artifact):
                os.remove(artifact)

def run_js_code():
    """Placeholder for JavaScript execution; not implemented.

    NOTE(review): the Run button never calls this — unsupported languages
    get a static "only Python and Rust" message in the output area instead.
    """
    pass

def dummy_auto_complete(code: str, language: str = None) -> str:
    """Simulate an LLM-powered code completion.

    Args:
        code (str): The incomplete code in the editor (unused by this stub;
            kept for interface parity with a real completion backend).
        language (str, optional): Selected programming language.

    Returns:
        str: A canned completed-code snippet for the language, defaulting
        to the Python example when the language is unknown.
    """
    time.sleep(2)  # Simulate processing time

    # Canned per-language completions served by the stub.
    samples = {
        "python": """# Function to calculate sum
def calculate_sum(a: int, b: int) -> int:
    '''Calculate sum of two integers'''
    return a + b""",
        "javascript": """// Function to calculate sum
function calculateSum(a, b) {
    return a + b;
}""",
        "rust": """// Function to calculate sum
fn calculate_sum(a: i32, b: i32) -> i32 {
    a + b
}""",
    }

    # Unknown or missing language falls back to the Python sample.
    if language in samples:
        return samples[language]
    return samples["python"]

# Sidebar settings
with st.sidebar:
    st.title("SolCoder")
    st.header("Solana AI Code Editor")
    # Editor appearance controls; the selected values feed directly into
    # the st_ace widget and the run/analyze dispatch below.
    theme = st.selectbox("Editor Theme",
        ["monokai", "github", "solarized_dark", "solarized_light", "dracula"])
    font_size = st.slider("Font Size", 12, 24, 14)
    show_gutter = st.checkbox("Show Line Numbers", value=True)
    language = st.selectbox("Language", ["python", "javascript", "rust"], index=0)

# Create two columns for main layout: editor (3) on the left, chat (2) on the right.
col1, col2 = st.columns([3, 2])

# Left Column - Code Editor and Output
with col1:
    st.subheader("")  # blank subheader used as vertical spacing
    st.subheader("Code Editor")
    st.markdown("Write your code below and use the buttons to run or debug")
    
    # Code editor; auto_update=True pushes the current contents back into
    # `editor` on every change without requiring Ctrl+Enter.
    editor = st_ace(
        value=st.session_state.editor_code,
        language=language,
        theme=theme,
        font_size=font_size,
        show_gutter=show_gutter,
        auto_update=True,
        height=EDITOR_HEIGHT,
        key="editor"
    )
    
    # Buttons - Modified to include three columns
    button_cols = st.columns(3)
    with button_cols[0]:
        auto_complete_btn = st.button("Auto-Complete", use_container_width=True)
    with button_cols[1]:
        run_btn = st.button("Run Code", use_container_width=True)
    with button_cols[2]:
        debug_btn = st.button("Debug Code", use_container_width=True)
    
    # Handle auto-complete button click
    if auto_complete_btn:
        with st.spinner("Generating code completion..."):
            try:
                # Get completed code from dummy function
                completed_code = dummy_auto_complete(st.session_state.editor_code, language)
                

                st.markdown(f'<div class="output-area">```{completed_code}```</div>', unsafe_allow_html=True)
                
                # # Update editor content in session state
                # NOTE(review): the st_ace widget above already rendered this
                # run, so the completed code only appears in the editor on the
                # next rerun — confirm this is the intended UX.
                st.session_state.editor_code = completed_code
                
                # Show success message
                st.success("Code successfully completed!")
                
            except Exception as e:
                st.error(f"Error during code completion: {str(e)}")
    
    # Output area - simplified container structure
    # Runs the CURRENT widget contents (`editor`), not session state.
    if run_btn:
        output = run_python_code(editor) if language == "python" else \
                run_rust_code(editor) if language == "rust" else \
                "Currently, only Python and Rust execution is supported."
        st.markdown(f'<div class="output-area">{output}</div>', unsafe_allow_html=True)
    else:
        st.markdown('<div class="output-area placeholder-text">Code output will appear here...</div>', 
                   unsafe_allow_html=True)

def format_ai_response(response):
    """Convert an assistant response into a markdown-ready string.

    Dict responses carrying a 'generation' object are rendered as the
    generation's prefix text followed by its imports and code in fenced
    blocks; anything else falls back to str(response).
    """
    # Guard clause: anything that is not a dict with a 'generation' key
    # is rendered via its string form.
    if not isinstance(response, dict) or 'generation' not in response:
        return str(response)

    generation = response['generation']
    sections = []
    if hasattr(generation, 'prefix'):
        sections.append(generation.prefix)
    if hasattr(generation, 'imports'):
        sections.append(f"```\n{generation.imports}\n```")
    if hasattr(generation, 'code'):
        sections.append(f"```\n{generation.code}\n```")
    return "\n".join(sections)


# Right Column - Chat Interface
with col2:
    # Match header styling with the code section
    # st.markdown('<p class="section-header">AI Assistant Chat</p>', unsafe_allow_html=True)
    st.subheader("")  # blank subheader used as vertical spacing
    st.subheader("Code Assistant Agent")
    
    

    # conversation

    def validate_message(message):
        """Validate message before adding to history.

        Only AIMessage/HumanMessage instances with non-empty string
        content are accepted.
        """
        if not isinstance(message, (AIMessage, HumanMessage)):
            return False
        if not message.content or not isinstance(message.content, str):
            return False
        return True

    def add_message_to_history(message):
        """Safely add message to chat history; returns True on success."""
        if validate_message(message):
            st.session_state.messages.append(message)
            return True
        return False
    
    # Update message display section
    # AI messages may embed ``` fenced code; odd-indexed split parts are the
    # fenced segments and are rendered with st.code.
    for message in st.session_state.messages:
        if isinstance(message, AIMessage):
            with st.chat_message("AI"):
                # Handle code blocks in message
                content = message.content
                if "```" in content:
                    parts = content.split("```")
                    for i, part in enumerate(parts):
                        if i % 2 == 0:  # Regular text
                            if part.strip():
                                st.markdown(part)
                        else:  # Code block
                            st.code(part)
                else:
                    st.markdown(content)
        elif isinstance(message, HumanMessage):
            with st.chat_message("Human"):
                st.markdown(message.content)
    

    
    # Clear chat button
    # NOTE(review): resets to an empty list rather than the greeting message
    # seeded at startup — confirm this is intentional.
    if st.button("Clear Chat", use_container_width=True):
        st.session_state.messages = []
        st.rerun()

    if prompt := st.chat_input("Ask about writing solana code..."):
        user_message = HumanMessage(content=prompt)
        
        # Add user message to history
        if add_message_to_history(user_message):
            with st.chat_message("AI"):
                # Create a placeholder for the loading indicator
                response_placeholder = st.empty()
                
                # Show loading message
                with response_placeholder:
                    with st.spinner("AI is thinking..."):
                        try:
                            # Get AI response
                            # NOTE(review): chatbot can be None if initialization
                            # failed above; that raises AttributeError here and is
                            # shown via the except branch below — confirm desired.
                            ai_response = chatbot.invoke({
                                "messages": [("user", prompt)], 
                                "iterations": 0, 
                                "error": ""
                            })
                            
                            # Format and add AI response
                            formatted_response = format_ai_response(ai_response)
                            ai_message = AIMessage(content=formatted_response)
                            
                            # Clear the loading indicator and show the response
                            response_placeholder.empty()
                            st.markdown(formatted_response)
                            
                            # Add to history
                            add_message_to_history(ai_message)
                            
                            # Only rerun after successful processing
                            # NOTE(review): st.rerun() works by raising an internal
                            # exception; raising it inside this try may be caught
                            # by the broad except below — verify against the
                            # installed Streamlit version.
                            st.rerun()
                            
                        except Exception as e:
                            response_placeholder.error(f"Error generating response: {str(e)}")