import os

import gradio as gr
import openai

# Read the API key from the OPENAI_API_KEY environment variable instead of
# hard-coding a secret in the source.
openai.api_key = os.environ.get("OPENAI_API_KEY")

SYSTEM_PROMPT = (
    "You are a helpful assistant that teaches Python programming. "
    "You always refer to the latest Python documentation when answering questions. "
    "If you're not sure about something, you say so and suggest checking the "
    "official documentation."
)

def get_python_help(message, history):
    if history is None:
        history = []
    try:
        # Build the chat transcript: system prompt, prior turns, then the new message.
        messages = [{"role": "system", "content": SYSTEM_PROMPT}]
        for user_msg, assistant_msg in history:
            messages.append({"role": "user", "content": user_msg})
            messages.append({"role": "assistant", "content": assistant_msg})
        messages.append({"role": "user", "content": message})

        # gpt-3.5-turbo is a chat model, so it must be called through the
        # ChatCompletion endpoint, not the legacy Completion endpoint.
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=messages,
            max_tokens=500,
            temperature=0.7,
        )

        # Extract the generated reply and append the turn to the history
        reply = response.choices[0].message["content"].strip()
        return reply, history + [(message, reply)]
    except Exception as e:
        return f"Error: {str(e)}", history

# Create the Gradio interface
iface = gr.Interface(
    fn=get_python_help,
    inputs=["text", "state"],
    outputs=["text", "state"],
    title="Python Learning Assistant",
    description="Ask questions about Python programming and get help based on the latest documentation.",
    examples=[
        ["How do I create a list in Python?"],
        ["Explain Python decorators"],
        ["What's new in Python 3.10?"],
        ["How do I use f-strings?"],
        ["Explain the difference between tuples and lists"],
    ],
    theme="default",
)

# Launch the app
iface.launch()
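
Note: the script above targets the legacy openai Python SDK (versions before 1.0), where the module-level openai.ChatCompletion call still exists. If you have openai>=1.0 installed, that call is gone. The following is a minimal sketch of the same assistant using the v1 client together with Gradio's ChatInterface; it assumes OPENAI_API_KEY is set in the environment and a Gradio version whose ChatInterface passes history as (user, assistant) pairs.

import os

import gradio as gr
from openai import OpenAI

# The v1 client can also read OPENAI_API_KEY from the environment by default.
client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

SYSTEM_PROMPT = (
    "You are a helpful assistant that teaches Python programming. "
    "You always refer to the latest Python documentation when answering questions. "
    "If you're not sure about something, you say so and suggest checking the "
    "official documentation."
)

def get_python_help(message, history):
    # Assumes history arrives as a list of (user, assistant) pairs,
    # which is the tuple-style history format of gr.ChatInterface.
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    try:
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=messages,
            max_tokens=500,
            temperature=0.7,
        )
        return response.choices[0].message.content.strip()
    except Exception as e:
        return f"Error: {e}"

demo = gr.ChatInterface(
    fn=get_python_help,
    title="Python Learning Assistant",
    description="Ask questions about Python programming and get help based on the latest documentation.",
    examples=[
        "How do I create a list in Python?",
        "Explain Python decorators",
        "What's new in Python 3.10?",
        "How do I use f-strings?",
        "Explain the difference between tuples and lists",
    ],
)

demo.launch()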