File size: 6,218 Bytes
27127dd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
import { useState, useEffect, useRef, useCallback } from 'react';
import { apiRequest } from './queryClient';
import { Message, ChatResponse, ModelInfo } from './types';

// Shared welcome message shown whenever a conversation is (re)initialized.
// Declared once so the four reset paths cannot drift apart.
const WELCOME_MESSAGE: Message = {
  role: "assistant",
  content: "Hello! I'm your AI assistant. How can I help you today?"
};

// Extract a human-readable message from an unknown thrown value.
// Falls back when the value is not an Error or has an empty message.
function toErrorMessage(err: unknown, fallback: string): string {
  return err instanceof Error && err.message ? err.message : fallback;
}

/**
 * Hook managing chat state for one conversation:
 * - holds the message list, loading flag, and last error
 * - loads history when the conversation id changes
 * - sends messages to `/api/chat` and appends the assistant reply
 * - polls `/api/health` (30s) and `/api/model-status` (5min)
 *
 * @param initialConversationId conversation to load first (default "default")
 * @returns chat state plus `sendMessage`, `clearConversation`,
 *          `loadMessages`, and `setConversationId` actions
 */
export function useChat(initialConversationId = "default") {
  const [messages, setMessages] = useState<Message[]>([WELCOME_MESSAGE]);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const [conversationId, setConversationId] = useState(initialConversationId);
  const [currentModel, setCurrentModel] = useState<'openai' | 'qwen' | 'unavailable'>('openai');
  const [isConnected, setIsConnected] = useState(true);

  // Load message history for a conversation. Errors are handled internally
  // (surfaced via `error` state), so callers may fire-and-forget.
  const loadMessages = useCallback(async (convId: string) => {
    try {
      setIsLoading(true);
      setError(null);
      const response = await fetch(`/api/conversations/${convId}/messages`, {
        credentials: 'include',
        headers: {
          'Accept': 'application/json',
          'Content-Type': 'application/json'
        }
      });

      if (!response.ok) {
        if (response.status === 404) {
          // Unknown conversation: reset to the welcome message, not an error.
          setMessages([WELCOME_MESSAGE]);
          return;
        }
        throw new Error('Failed to load message history');
      }

      const data = await response.json() as Message[];
      // An empty history also resets to the welcome message.
      setMessages(data.length > 0 ? data : [WELCOME_MESSAGE]);
    } catch (err: unknown) {
      setError(toErrorMessage(err, 'Failed to load message history'));
      console.error('Error loading messages:', err);
    } finally {
      setIsLoading(false);
    }
  }, []);

  // Re-load history whenever the active conversation changes.
  // `loadMessages` is a stable useCallback, so including it in the deps
  // satisfies exhaustive-deps without causing extra runs.
  useEffect(() => {
    if (conversationId) {
      // loadMessages never rejects (it catches internally); fire and forget.
      void loadMessages(conversationId);
    }
  }, [conversationId, loadMessages]);

  // Send a message to the API and append the assistant's reply.
  const sendMessage = useCallback(async (content: string) => {
    if (!content.trim()) return;

    try {
      setIsLoading(true);
      setError(null);

      // Show the user's message immediately; the server copy is synced below.
      const userMessage: Message = {
        role: "user",
        content
      };

      setMessages(prev => [...prev, userMessage]);

      // Build the history payload from the pre-send snapshot of `messages`
      // (the optimistic update above is not yet visible here), dropping any
      // entries missing content or role.
      const messageHistory = messages
        .filter(msg => msg.content && msg.role)
        .map(msg => ({
          role: msg.role,
          content: msg.content
        }));

      // Append the new user message explicitly.
      messageHistory.push({
        role: userMessage.role,
        content: userMessage.content
      });

      const response = await apiRequest('POST', '/api/chat', {
        messages: messageHistory,
        conversationId
      });

      const data = await response.json() as ChatResponse;

      // Append the assistant's reply.
      setMessages(prev => [...prev, data.message]);

      // Track which backend model actually answered, if reported.
      if (data.modelInfo) {
        setCurrentModel(data.modelInfo.model);
      }

      // Re-fetch history shortly after so local state matches the server's
      // canonical copy. NOTE(review): this timer is not cancelled if the
      // component unmounts within 1s — confirm whether that matters here.
      setTimeout(() => {
        loadMessages(conversationId);
      }, 1000);

    } catch (err: unknown) {
      let errorMessage = toErrorMessage(err, 'Failed to send message');

      // Translate OpenAI quota failures into a friendlier explanation.
      if (errorMessage.includes('quota exceeded') || errorMessage.includes('insufficient_quota')) {
        errorMessage = "The OpenAI API quota has been exceeded. This often happens with free accounts. The system will attempt to use the Qwen fallback model.";
      }

      setError(errorMessage);
      console.error('Error sending message:', err);
    } finally {
      setIsLoading(false);
    }
  }, [messages, conversationId, loadMessages]);

  // Reset the local message list to the welcome message.
  // NOTE(review): this does not clear the server-side conversation — the
  // next loadMessages call will restore the server history. Confirm intent.
  const clearConversation = useCallback(() => {
    setMessages([WELCOME_MESSAGE]);
  }, []);

  // Poll /api/health every 30s to drive the connection indicator.
  useEffect(() => {
    const checkConnection = async () => {
      try {
        const response = await fetch('/api/health');
        setIsConnected(response.ok);
      } catch {
        setIsConnected(false);
      }
    };

    checkConnection();
    const interval = setInterval(checkConnection, 30000);
    return () => clearInterval(interval);
  }, []);

  // Poll /api/model-status every 5 minutes to keep `currentModel` fresh.
  useEffect(() => {
    const checkModelStatus = async () => {
      try {
        const response = await fetch('/api/model-status');
        if (response.ok) {
          const data = await response.json();
          setCurrentModel(data.model);
        }
      } catch (err: unknown) {
        console.error('Error checking model status:', err);
      }
    };

    checkModelStatus();
    const interval = setInterval(checkModelStatus, 5 * 60 * 1000);
    return () => clearInterval(interval);
  }, []);

  return {
    messages,
    isLoading,
    error,
    conversationId,
    isConnected,
    currentModel,
    sendMessage,
    clearConversation,
    loadMessages,
    setConversationId
  };
}

/**
 * Returns a ref to attach to a scrollable container element; whenever
 * `dependency` changes, the container is scrolled to its bottom.
 */
export function useScrollToBottom(dependency: any) {
  const containerRef = useRef<HTMLDivElement>(null);

  useEffect(() => {
    const el = containerRef.current;
    if (el) {
      el.scrollTop = el.scrollHeight;
    }
  }, [dependency]);

  return containerRef;
}