	Update app.py
app.py CHANGED
@@ -226,7 +226,7 @@ tools.append(shell_tool)
 # ──────────────────────────────
 def chat(message, history):
     llm = ChatOpenAI(
-        model="openai/gpt-4.1-
+        model="openai/gpt-4.1-mini",
         openai_api_key="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",
         openai_api_base="https://models.github.ai/inference",  # 👉 this acts as the base_url
     )
@@ -263,7 +263,7 @@ def chat(message, history):
     try:
         raw = agent.invoke(ab)["output"]
     except:
-        llm = ChatOpenAI(model="openai/gpt-4.1-
+        llm = ChatOpenAI(model="openai/gpt-4.1-mini",openai_api_key="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",openai_api_base="https://models.github.ai/inference")
         agent=initialize_agent(tools,llm,agent_type=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,verbose=True,handle_parsing_errors=True)
         raw=agent.invoke(ab)["output"]
     try:
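
Both hunks make the same change: the chat model is pinned to openai/gpt-4.1-mini on the GitHub Models OpenAI-compatible endpoint, once for the primary agent and once for the fallback agent built inside the except block. Below is a minimal sketch of that pattern, assuming the langchain_openai ChatOpenAI class and the legacy langchain initialize_agent helper (written here with its documented agent= keyword). The GITHUB_TOKEN environment variable, the empty tools list, and the {"input": message} payload are illustrative stand-ins for what app.py defines elsewhere, not part of the commit.

import os

from langchain_openai import ChatOpenAI
from langchain.agents import AgentType, initialize_agent

tools = []  # stand-in: app.py appends shell_tool and friends before this point


def build_llm():
    # GitHub Models exposes an OpenAI-compatible API, so ChatOpenAI only needs
    # the model id, a token, and the inference endpoint as its base URL.
    return ChatOpenAI(
        model="openai/gpt-4.1-mini",
        openai_api_key=os.environ["GITHUB_TOKEN"],  # assumption: token from env, the commit hardcodes a PAT
        openai_api_base="https://models.github.ai/inference",
    )


def chat(message, history):
    agent = initialize_agent(
        tools,
        build_llm(),
        agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
        verbose=True,
        handle_parsing_errors=True,
    )
    ab = {"input": message}  # assumption: the payload shape app.py builds from message/history
    try:
        raw = agent.invoke(ab)["output"]
    except Exception:
        # The commit's fallback path: rebuild the LLM and agent, then retry once.
        agent = initialize_agent(
            tools,
            build_llm(),
            agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
            verbose=True,
            handle_parsing_errors=True,
        )
        raw = agent.invoke(ab)["output"]
    return raw

The retry-by-rebuilding structure mirrors the diff; the only behavioural change the commit itself makes is completing the model name on both code paths.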