ngwakomadikwe committed on
Commit
fc0acea
·
verified ·
1 Parent(s): de4e354

Create app/routes/chat.py

Browse files
Files changed (1) hide show
  1. app/routes/chat.py +88 -0
app/routes/chat.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""
Chat routes for OpenAI integration
"""
import logging
from flask import Blueprint, request, jsonify, current_app
from app.services.openai_service import OpenAIService

# Module-level logger named after this module so log output follows the
# package hierarchy and can be configured per-module.
logger = logging.getLogger(__name__)
# Blueprint grouping the chat endpoints; presumably registered on the app
# elsewhere (registration is not visible in this file) — verify against
# the app factory.
chat_bp = Blueprint('chat', __name__)
10
+
11
@chat_bp.route("/chat", methods=["POST"])
def chat():
    """
    Handle chat requests to OpenAI.

    Expected JSON payload:
        {
            "message": "User message",
            "model": "gpt-3.5-turbo" (optional),
            "temperature": 0.7 (optional),
            "system_message": "System prompt" (optional)
        }

    Returns:
        200 with {"reply": ..., "model": ..., "usage": {...}} on success,
        400 on any validation failure,
        500 on unexpected errors.
    """
    try:
        # Reject non-JSON bodies early with an explicit client error.
        if not request.is_json:
            return jsonify({"error": "Request must be JSON"}), 400

        data = request.get_json()
        if not data:
            return jsonify({"error": "No JSON data provided"}), 400

        # Optional parameters default to None so the service layer can
        # apply its own defaults.
        user_message = data.get("message", "").strip()
        model = data.get("model")
        temperature = data.get("temperature")
        system_message = data.get("system_message")

        # A missing OPENAI_API_KEY raises KeyError, caught by the broad
        # handler below and reported as a 500.
        openai_service = OpenAIService(current_app.config['OPENAI_API_KEY'])

        # Message validation rules (length/content) live in the service.
        is_valid, error_msg = openai_service.validate_message(user_message)
        if not is_valid:
            return jsonify({"error": error_msg}), 400

        # Lazy %-style args avoid formatting work when INFO is disabled.
        logger.info("Processing chat request for message length: %d",
                    len(user_message))

        result = openai_service.chat_completion(
            message=user_message,
            model=model,
            temperature=temperature,
            system_message=system_message
        )

        return jsonify(result), 200

    except Exception as e:
        # logger.exception records the full traceback, not just str(e).
        logger.exception("Chat endpoint error")
        # NOTE(review): echoing str(e) to the client can leak internals
        # (e.g. configuration errors); kept for backward compatibility,
        # but consider replacing with a generic message.
        return jsonify({
            "error": "Failed to process chat request",
            "message": str(e)
        }), 500
72
+
73
@chat_bp.route("/chat/models", methods=["GET"])
def available_models():
    """Return the supported OpenAI chat models plus the default choice."""
    # Fixed, ordered set of model identifiers exposed by this API.
    supported = (
        "gpt-3.5-turbo",
        "gpt-3.5-turbo-16k",
        "gpt-4",
        "gpt-4-turbo-preview",
    )
    payload = {
        "models": list(supported),
        "default": supported[0],
    }
    return jsonify(payload), 200