Upload 2 files
Browse files- chatbot.py +27 -0
- main.py +17 -0
chatbot.py
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import anthropic
|
2 |
+
|
3 |
+
# Function to interact with the Anthropic API and generate a response
def get_anthropic_response(question):
    """Send *question* to the Anthropic Messages API and return the reply content.

    Args:
        question: The user's message text.

    Returns:
        The ``content`` of the API response — a list of content blocks, each
        exposing a ``.text`` attribute.

    Raises:
        RuntimeError: If the ANTHROPIC_API_KEY environment variable is not set.
    """
    import os

    # SECURITY: never hard-code API keys in source. The original embedded a
    # live secret (now leaked by this upload — revoke it immediately).
    # Read the key from the environment instead.
    api_key = os.environ.get("ANTHROPIC_API_KEY")
    if not api_key:
        raise RuntimeError(
            "ANTHROPIC_API_KEY environment variable is not set."
        )

    # Initialize the Anthropic client with the key from the environment.
    client = anthropic.Anthropic(api_key=api_key)

    # Parameters for text generation.
    model_name = "claude-3-opus-20240229"
    max_tokens = 1024

    # Generate the response using the specified model and parameters.
    message = client.messages.create(
        model=model_name,
        max_tokens=max_tokens,
        messages=[{"role": "user", "content": question}],
    )

    # Extract and return the generated response content blocks.
    return message.content
21 |
+
|
22 |
+
# Function to simulate a chatbot interaction
def chatbot(user_message):
    """Generate a chatbot reply for *user_message* and return it as plain text.

    Args:
        user_message: The user's input text.

    Returns:
        The text of the model's first content block, or "" if the reply
        contained no content blocks.
    """
    # Use the Anthropic model to generate a response (a list of content blocks).
    response = get_anthropic_response(user_message)

    # Guard against an empty content list instead of raising IndexError.
    if not response:
        return ""

    text = response[0].text
    # Log the reply text (the original printed the raw block object,
    # which is debug noise rather than the actual message).
    print(text)
    return text
|
main.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
from fastapi import FastAPI
|
3 |
+
from chatbot import chatbot
|
4 |
+
|
5 |
+
# FastAPI application instance; the /chat route below is registered on it.
app = FastAPI()
|
6 |
+
|
7 |
+
# Endpoint to handle chat requests
@app.post("/chat", response_model=str)
def chat_endpoint(chat_request: str) -> str:
    """Handle POST /chat: forward the message to the chatbot, return its reply.

    NOTE(review): because `chat_request` is a plain ``str`` parameter, FastAPI
    treats it as a required *query* parameter (``/chat?chat_request=...``), not
    a JSON body — confirm that is the intended API shape before changing it.
    """
    return chatbot(chat_request)
|
11 |
+
|
12 |
+
def main():
    """Launch the Gradio web UI for the chatbot."""
    # Build the UI: a single text box in, text out, driven by chatbot().
    ui = gr.Interface(
        fn=chatbot,
        inputs="text",
        outputs="text",
        title="Chatbot",
    )
    # NOTE(review): share=True publishes a public *.gradio.live URL for this
    # app — confirm that exposure is intended.
    ui.launch(share=True)
|
15 |
+
|
16 |
+
# Script entry point: start the Gradio UI only when run directly,
# not when this module is imported (e.g. by a test or another app).
if __name__ == "__main__":
    main()
|