Abmacode12 commited on
Commit
71032d7
·
verified ·
1 Parent(s): b99465c

Fonctionnel :

Browse files

+ = ouvrir un sélecteur de fichiers

prise = bouton “connecter des applications”

micro = dictée vocale (Web Speech API)

flèche = envoyer

Placeholder : “Envoyer un message à Espace Codage”

Au-dessus : panneau “Voir l’ordinateur de Espace Codage” qui défile (logs)

"use client";

import React, { useEffect, useMemo, useRef, useState } from "react";

// Public contract of ChatFooterBar.
type Props = {
  // Invoked when the user sends a message; receives the trimmed text plus any
  // files picked via the "+" button. May be async — the bar awaits it before
  // clearing the input.
  onSend: (text: string, files?: File[]) => Promise<void> | void;
  // Optional handler for the plug ("connecter des applications") button.
  onConnectApps?: () => void;
};

// UI state of the voice-dictation feature (Web Speech API):
// idle (mic off) / listening / unsupported (no browser API) / denied (permission refused).
type SpeechState = "idle" | "listening" | "unsupported" | "denied";

/**
 * Chat footer bar: file picker (+), "connect apps" (plug), voice dictation
 * (mic, Web Speech API), send (arrow), plus a scrolling "computer" log panel
 * above the input.
 *
 * Fixes over the previous version:
 *  - stopDictation() now actually aborts the recognition instance (it is kept
 *    in a ref); before, only the UI state flipped and the mic kept listening
 *    because `continuous = true`.
 *  - Dictation no longer duplicates text: results rebuild the field from a
 *    snapshot taken when dictation starts, instead of appending the
 *    accumulated final transcript to the live state on every event.
 *  - Recognition is stopped on unmount so the microphone is released.
 */
export default function ChatFooterBar({ onSend, onConnectApps }: Props) {
  const [text, setText] = useState("");
  const [files, setFiles] = useState<File[]>([]);
  const [speech, setSpeech] = useState<SpeechState>("idle");
  const [logs, setLogs] = useState<string[]>([
    "Espace Codage • Démarrage…",
    "Chargement des services…",
    "Prêt ✅"
  ]);

  const fileInputRef = useRef<HTMLInputElement | null>(null);
  const logsRef = useRef<HTMLDivElement | null>(null);
  // Active SpeechRecognition instance so stopDictation()/unmount can abort it.
  const recogRef = useRef<any>(null);

  // ---- "Ordinateur" qui défile (simulated activity feed, capped at 200 lines)
  useEffect(() => {
    const t = setInterval(() => {
      setLogs((prev) => {
        const next = [...prev, `Console • ${new Date().toLocaleTimeString()} • activité…`];
        return next.length > 200 ? next.slice(-200) : next;
      });
    }, 1400);
    return () => clearInterval(t);
  }, []);

  // Keep the log panel pinned to the newest entry.
  useEffect(() => {
    if (!logsRef.current) return;
    logsRef.current.scrollTop = logsRef.current.scrollHeight;
  }, [logs]);

  // Release the microphone if the component unmounts while listening.
  useEffect(() => {
    return () => {
      try { recogRef.current?.stop(); } catch {}
      recogRef.current = null;
    };
  }, []);

  // ---- Dictée vocale (Web Speech API); null when the browser has no support
  // (or during SSR, where `window` is undefined).
  const SpeechRecognitionImpl = useMemo(() => {
    if (typeof window === "undefined") return null;
    const w = window as any;
    return w.SpeechRecognition || w.webkitSpeechRecognition || null;
  }, []);

  const startDictation = () => {
    if (!SpeechRecognitionImpl) {
      setSpeech("unsupported");
      return;
    }

    const recog = new SpeechRecognitionImpl();
    recogRef.current = recog;
    recog.lang = "fr-FR";
    recog.interimResults = true;
    recog.continuous = true;

    setSpeech("listening");

    // Snapshot of the input when dictation starts. Every result event rebuilds
    // the field as `base + final + interim`, so already-finalized text is
    // never re-appended (the old code folded finalText into the live state on
    // each event, duplicating it).
    const base = text.trim();
    let finalText = "";

    recog.onresult = (event: any) => {
      let interim = "";
      for (let i = event.resultIndex; i < event.results.length; i++) {
        const transcript = event.results[i][0].transcript;
        if (event.results[i].isFinal) finalText += transcript;
        else interim += transcript;
      }
      const prefix = base.length ? base + " " : "";
      setText((prefix + finalText + interim).replace(/\s+/g, " ").trim());
    };

    recog.onerror = (e: any) => {
      // not-allowed / service-not-allowed => mic denied by user or policy;
      // anything else (audio-capture, network, …) just returns to idle.
      const code = String(e?.error);
      if (code.includes("not-allowed") || code.includes("service-not-allowed")) {
        setSpeech("denied");
      } else {
        setSpeech("idle");
      }
      try { recog.stop(); } catch {}
    };

    recog.onend = () => {
      if (recogRef.current === recog) recogRef.current = null;
      // Preserve "unsupported"/"denied" so the tooltip keeps explaining why.
      setSpeech((s) => (s === "listening" ? "idle" : s));
    };

    try {
      recog.start();
    } catch {
      recogRef.current = null;
      setSpeech("idle");
    }
  };

  const stopDictation = () => {
    // Hard stop: abort the stored instance instead of only resetting UI state.
    try { recogRef.current?.stop(); } catch {}
    recogRef.current = null;
    setSpeech("idle");
  };

  const handlePickFiles = () => fileInputRef.current?.click();

  const handleFilesChanged = (e: React.ChangeEvent<HTMLInputElement>) => {
    const list = Array.from(e.target.files ?? []);
    setFiles(list);
  };

  // Send the trimmed text + attachments, then reset the input and file picker.
  const handleSend = async () => {
    const msg = text.trim();
    if (!msg && files.length === 0) return;

    await onSend(msg, files);

    setText("");
    setFiles([]);
    if (fileInputRef.current) fileInputRef.current.value = "";
  };

  return (
    <div className="w-full">
      {/* --- Petit écran au-dessus (ordinateur) --- */}
      <div className="mb-2 rounded-xl border border-white/10 bg-white/5 overflow-hidden">
        <div className="flex items-center justify-between px-3 py-2 border-b border-white/10">
          <div className="text-xs font-semibold text-white/80">Voir l’ordinateur de Espace Codage</div>
          <div className="text-[11px] text-white/50">
            {speech === "listening" ? "Micro actif…" : "En ligne"}
          </div>
        </div>

        <div
          ref={logsRef}
          className="h-28 overflow-auto px-3 py-2 text-[11px] leading-5 text-white/70"
        >
          {logs.map((l, idx) => (
            <div key={idx} className="whitespace-pre-wrap">
              {l}
            </div>
          ))}
        </div>
      </div>

      {/* --- Barre du bas (comme la capture) --- */}
      <div className="flex items-center gap-2 rounded-xl border border-white/10 bg-white/5 px-2 py-2">
        {/* + Fichiers */}
        <button
          type="button"
          onClick={handlePickFiles}
          className="h-9 w-9 grid place-items-center rounded-lg hover:bg-white/10 border border-white/10"
          aria-label="Ajouter des fichiers"
          title="Ajouter des fichiers"
        >
          <PlusIcon />
        </button>

        {/* Connecter des apps (prise) */}
        <button
          type="button"
          onClick={onConnectApps}
          className="h-9 w-9 grid place-items-center rounded-lg hover:bg-white/10 border border-white/10"
          aria-label="Connecter des applications"
          title="Connecter des applications"
        >
          <PlugIcon />
        </button>

        {/* Champ texte */}
        <div className="flex-1">
          <input
            value={text}
            onChange={(e) => setText(e.target.value)}
            onKeyDown={(e) => {
              if (e.key === "Enter" && !e.shiftKey) {
                e.preventDefault();
                void handleSend();
              }
            }}
            className="w-full bg-transparent outline-none text-sm px-2 py-2 text-white placeholder:text-white/50"
            placeholder="Envoyer un message à Espace Codage"
          />
          {!!files.length && (
            <div className="px-2 pt-1 text-[11px] text-white/60">
              {files.length} fichier(s) ajouté(s)
            </div>
          )}
        </div>

        {/* Micro dictée */}
        <button
          type="button"
          onClick={() => (speech === "listening" ? stopDictation() : startDictation())}
          className={[
            "h-9 w-9 grid place-items-center rounded-lg border border-white/10",
            speech === "listening" ? "bg-white text-black" : "hover:bg-white/10"
          ].join(" ")}
          aria-label="Saisie vocale"
          title={
            speech === "unsupported"
              ? "Dictée vocale non supportée"
              : speech === "denied"
              ? "Permission micro refusée"
              : "Saisie vocale"
          }
        >
          <MicIcon active={speech === "listening"} />
        </button>

        {/* Envoyer */}
        <button
          type="button"
          onClick={() => void handleSend()}
          className="h-9 w-9 grid place-items-center rounded-lg bg-white/10 hover:bg-white/15 border border-white/10"
          aria-label="Envoyer"
          title="Envoyer"
        >
          <SendIcon />
        </button>

        <input
          ref={fileInputRef}
          type="file"
          multiple
          className="hidden"
          onChange={handleFilesChanged}
        />
      </div>
    </div>
  );
}

/* --- Icônes inline (pas de dépendances) --- */

function PlusIcon() {
return (
<svg width="18" height="18" viewBox="0 0 24 24" fill="none">
<path d="M12 5v14M5 12h14" stroke="currentColor" strokeWidth="2" strokeLinecap="round" />
</svg>
);
}

function PlugIcon() {
return (
<svg width="18" height="18" viewBox="0 0 24 24" fill="none">
<path
d="M9 7v5M15 7v5M7 12h10M10 12v4a4 4 0 0 0 4 4h1"
stroke="currentColor"
strokeWidth="2"
strokeLinecap="round"
/>
<path
d="M6 7h12"
stroke="currentColor"
strokeWidth="2"
strokeLinecap="round"
/>
</svg>
);
}

function MicIcon({ active }: { active: boolean }) {
return (
<svg width="18" height="18" viewBox="0 0 24 24" fill="none">
<path
d="M12 14a3 3 0 0 0 3-3V7a3 3 0 0 0-6 0v4a3 3 0 0 0 3 3Z"
stroke="currentColor"
strokeWidth="2"
/>
<path
d="M19 11a7 7 0 0 1-14 0"
stroke="currentColor"
strokeWidth="2"
strokeLinecap="round"
/>
<path
d="M12 18v3"
stroke="currentColor"
strokeWidth="2"
strokeLinecap="round"
/>
<path
d="M8 21h8"
stroke="currentColor"
strokeWidth="2"
strokeLinecap="round"
/>
{active ? (
<path
d="M4 4l16 16"
stroke="currentColor"
strokeWidth="2"
strokeLinecap="round"
opacity="0.25"
/>
) : null}
</svg>
);
}

function SendIcon() {
return (
<svg width="18" height="18" viewBox="0 0 24 24" fill="none">
<path
d="M5 12h12"
stroke="currentColor"
strokeWidth="2"
strokeLinecap="round"
/>
<path
d="M13 6l6 6-6 6"
stroke="currentColor"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
}

2) Exemple d’intégration dans ton chat : components/ChatPanel.tsx

Ici tu branches ton système existant (onSend).
Le code ci-dessous montre comment récupérer texte + fichiers.

"use client";

import React, { useState } from "react";
import ChatFooterBar from "./ChatFooterBar";

export default function ChatPanel() {
const [messages, setMessages] = useState<{ role: "user" | "assistant"; content: string }[]>([
{ role: "assistant", content: "Envoyez-moi un message." }
]);

return (
<div className="h-full flex flex-col">
<div className="flex-1 overflow-auto p-3 space-y-2">
{messages.map((m, i) => (

Files changed (2) hide show
  1. components/chat.js +102 -34
  2. style.css +39 -1
components/chat.js CHANGED
@@ -3,16 +3,18 @@ class CustomChat extends HTMLElement {
3
  constructor() {
4
  super();
5
  this._logs = [
6
- "Espace Codage • Démarrage...",
7
- "Chargement des services...",
8
- "Prêt "
 
9
  ];
10
  this._messages = [
11
- { role: "assistant", content: "Bonjour ! Je suis Rosalinda, votre IA personnelle. Comment puis-je vous aider aujourd'hui ?" }
12
  ];
 
 
13
  }
14
-
15
- connectedCallback() {
16
  this.attachShadow({ mode: 'open' });
17
  this.shadowRoot.innerHTML = `
18
  <style>
@@ -190,7 +192,6 @@ class CustomChat extends HTMLElement {
190
  this._setupEventListeners();
191
  this._startLogsSimulation();
192
  }
193
-
194
  _setupEventListeners() {
195
  const shadow = this.shadowRoot;
196
  const fileButton = shadow.getElementById('fileButton');
@@ -202,53 +203,110 @@ class CustomChat extends HTMLElement {
202
  const filesInfo = shadow.getElementById('filesInfo');
203
  const micStatus = shadow.getElementById('micStatus');
204
 
205
- let files = [];
206
  let recognition;
207
-
208
  // File upload
209
  fileButton.addEventListener('click', () => fileInput.click());
210
  fileInput.addEventListener('change', (e) => {
211
- files = Array.from(e.target.files);
212
- filesInfo.textContent = files.length ? `${files.length} fichier(s) sélectionné(s)` : '';
 
213
  });
214
-
215
  // Connect apps
216
  connectButton.addEventListener('click', () => {
217
- this._addMessage('assistant', 'Ouverture: connecter des applications...');
 
 
 
218
  });
219
-
220
  // Speech recognition
221
  micButton.addEventListener('click', () => {
222
- if ('webkitSpeechRecognition' in window || 'SpeechRecognition' in window) {
223
- if (micButton.classList.contains('listening')) {
224
- this._stopSpeechRecognition(recognition, micButton, micStatus);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
225
  } else {
226
- recognition = this._startSpeechRecognition(micButton, micStatus, messageInput);
227
  }
228
- } else {
229
- micStatus.textContent = 'Micro non supporté';
230
  setTimeout(() => micStatus.textContent = 'En ligne', 2000);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
231
  }
232
  });
233
-
234
  // Send message
235
  const sendMessage = () => {
236
  const message = messageInput.value.trim();
237
- if (message || files.length) {
238
- this._addMessage('user', message + (files.length ? ` (${files.length} fichier(s))` : ''));
 
 
 
239
  messageInput.value = '';
240
- files = [];
241
  filesInfo.textContent = '';
242
  fileInput.value = '';
243
 
244
- // Simulate Rosalinda's response
245
  setTimeout(() => {
246
- this._addMessage('assistant', 'Message reçu. Je traite votre demande...');
247
- }, 1000);
 
 
 
 
 
 
 
248
  }
249
  };
250
-
251
- sendButton.addEventListener('click', sendMessage);
252
  messageInput.addEventListener('keydown', (e) => {
253
  if (e.key === 'Enter') sendMessage();
254
  });
@@ -268,18 +326,28 @@ class CustomChat extends HTMLElement {
268
  `).join('');
269
  chatContainer.scrollTop = chatContainer.scrollHeight;
270
  }
271
-
272
  _startLogsSimulation() {
273
  const logsContainer = this.shadowRoot.getElementById('logsContainer');
274
  setInterval(() => {
275
- this._logs.push(`Console ${new Date().toLocaleTimeString()} • activité...`);
 
 
 
 
 
 
 
 
 
276
  if (this._logs.length > 20) this._logs.shift();
277
- logsContainer.innerHTML = this._logs.map(log => `<div class="log-line">${log}</div>`).join('');
 
 
 
278
  logsContainer.scrollTop = logsContainer.scrollHeight;
279
  }, 1400);
280
  }
281
-
282
- _startSpeechRecognition(micButton, micStatus, messageInput) {
283
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
284
  const recognition = new SpeechRecognition();
285
  recognition.lang = 'fr-FR';
 
3
  constructor() {
4
  super();
5
  this._logs = [
6
+ "Rosalinda • Démarrage...",
7
+ "Chargement des services IA...",
8
+ "Connexion à l'espace de codage...",
9
+ "Prêt à aider ✅"
10
  ];
11
  this._messages = [
12
+ { role: "assistant", content: "Bonjour ! Je suis Rosalinda, votre IA de création. Je peux vous aider avec des projets, du code, des images et des vidéos. Que souhaitez-vous créer aujourd'hui ?" }
13
  ];
14
+ this._files = [];
15
+ this._speechState = "idle"; // idle, listening, unsupported, denied
16
  }
17
+ connectedCallback() {
 
18
  this.attachShadow({ mode: 'open' });
19
  this.shadowRoot.innerHTML = `
20
  <style>
 
192
  this._setupEventListeners();
193
  this._startLogsSimulation();
194
  }
 
195
  _setupEventListeners() {
196
  const shadow = this.shadowRoot;
197
  const fileButton = shadow.getElementById('fileButton');
 
203
  const filesInfo = shadow.getElementById('filesInfo');
204
  const micStatus = shadow.getElementById('micStatus');
205
 
 
206
  let recognition;
 
207
  // File upload
208
  fileButton.addEventListener('click', () => fileInput.click());
209
  fileInput.addEventListener('change', (e) => {
210
+ this._files = Array.from(e.target.files);
211
+ filesInfo.textContent = this._files.length ?
212
+ `${this._files.length} fichier(s) sélectionné(s)` : '';
213
  });
 
214
  // Connect apps
215
  connectButton.addEventListener('click', () => {
216
+ this._addMessage('assistant',
217
+ 'Ouverture des connexions d\'applications... Je peux me connecter à:\n' +
218
+ '- Votre espace de stockage\n- Outils de création\n- API externes\n' +
219
+ 'Quel service souhaitez-vous connecter?');
220
  });
 
221
  // Speech recognition
222
  micButton.addEventListener('click', () => {
223
+ if (this._speechState === 'listening') {
224
+ this._stopSpeechRecognition(recognition, micButton, micStatus);
225
+ return;
226
+ }
227
+
228
+ const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
229
+ if (!SpeechRecognition) {
230
+ this._speechState = 'unsupported';
231
+ micStatus.textContent = 'Micro non supporté';
232
+ setTimeout(() => micStatus.textContent = 'En ligne', 2000);
233
+ return;
234
+ }
235
+
236
+ recognition = new SpeechRecognition();
237
+ recognition.lang = 'fr-FR';
238
+ recognition.interimResults = true;
239
+ recognition.continuous = true;
240
+
241
+ recognition.onstart = () => {
242
+ this._speechState = 'listening';
243
+ micButton.classList.add('listening');
244
+ micStatus.textContent = 'Micro actif...';
245
+ };
246
+
247
+ recognition.onresult = (event) => {
248
+ let transcript = '';
249
+ for (let i = event.resultIndex; i < event.results.length; i++) {
250
+ if (event.results[i].isFinal) {
251
+ transcript += event.results[i][0].transcript;
252
+ }
253
+ }
254
+ messageInput.value = transcript;
255
+ };
256
+
257
+ recognition.onerror = (event) => {
258
+ if (event.error === 'not-allowed') {
259
+ this._speechState = 'denied';
260
+ micStatus.textContent = 'Micro bloqué';
261
  } else {
262
+ this._speechState = 'idle';
263
  }
264
+ micButton.classList.remove('listening');
 
265
  setTimeout(() => micStatus.textContent = 'En ligne', 2000);
266
+ };
267
+
268
+ recognition.onend = () => {
269
+ if (this._speechState === 'listening') {
270
+ this._speechState = 'idle';
271
+ micButton.classList.remove('listening');
272
+ micStatus.textContent = 'En ligne';
273
+ }
274
+ };
275
+
276
+ try {
277
+ recognition.start();
278
+ } catch (err) {
279
+ this._speechState = 'idle';
280
+ micStatus.textContent = 'Erreur micro';
281
  }
282
  });
 
283
  // Send message
284
  const sendMessage = () => {
285
  const message = messageInput.value.trim();
286
+ if (message || this._files.length) {
287
+ const fileInfo = this._files.length ?
288
+ `\n[Fichiers joints: ${this._files.map(f => f.name).join(', ')}]` : '';
289
+
290
+ this._addMessage('user', message + fileInfo);
291
  messageInput.value = '';
292
+ this._files = [];
293
  filesInfo.textContent = '';
294
  fileInput.value = '';
295
 
296
+ // Simulate Rosalinda's intelligent response
297
  setTimeout(() => {
298
+ const responses = [
299
+ "J'ai bien reçu votre demande. Je travaille sur une solution créative...",
300
+ "Analyse en cours. Je prépare quelque chose d'unique pour vous...",
301
+ "Génération en cours. Voici ce que je propose pour votre projet:",
302
+ "J'ai une idée intéressante pour votre demande. Voici mes suggestions:"
303
+ ];
304
+ const randomResponse = responses[Math.floor(Math.random() * responses.length)];
305
+ this._addMessage('assistant', randomResponse);
306
+ }, 1500);
307
  }
308
  };
309
+ sendButton.addEventListener('click', sendMessage);
 
310
  messageInput.addEventListener('keydown', (e) => {
311
  if (e.key === 'Enter') sendMessage();
312
  });
 
326
  `).join('');
327
  chatContainer.scrollTop = chatContainer.scrollHeight;
328
  }
 
329
  _startLogsSimulation() {
330
  const logsContainer = this.shadowRoot.getElementById('logsContainer');
331
  setInterval(() => {
332
+ const actions = [
333
+ 'Analyse du projet',
334
+ 'Optimisation des ressources',
335
+ 'Génération de contenu',
336
+ 'Connexion aux services',
337
+ 'Préparation des résultats'
338
+ ];
339
+ const randomAction = actions[Math.floor(Math.random() * actions.length)];
340
+
341
+ this._logs.push(`Rosalinda • ${new Date().toLocaleTimeString()} • ${randomAction}...`);
342
  if (this._logs.length > 20) this._logs.shift();
343
+
344
+ logsContainer.innerHTML = this._logs.map(log =>
345
+ `<div class="log-line">${log}</div>`
346
+ ).join('');
347
  logsContainer.scrollTop = logsContainer.scrollHeight;
348
  }, 1400);
349
  }
350
+ _startSpeechRecognition(micButton, micStatus, messageInput) {
 
351
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
352
  const recognition = new SpeechRecognition();
353
  recognition.lang = 'fr-FR';
style.css CHANGED
@@ -5,7 +5,6 @@ body {
5
  overflow: hidden;
6
  font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
7
  }
8
-
9
  /* Rosalinda IA Chat Styles */
10
  custom-chat {
11
  flex: 1;
@@ -13,6 +12,45 @@ custom-chat {
13
  flex-direction: column;
14
  background: #0b0f19;
15
  color: white;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  }
17
  custom-sidebar {
18
  width: 280px;
 
5
  overflow: hidden;
6
  font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
7
  }
 
8
  /* Rosalinda IA Chat Styles */
9
  custom-chat {
10
  flex: 1;
 
12
  flex-direction: column;
13
  background: #0b0f19;
14
  color: white;
15
+ position: relative;
16
+ }
17
+
18
+ .user-message {
19
+ background: #3b82f6;
20
+ color: white;
21
+ margin-left: auto;
22
+ border-radius: 1rem 1rem 0 1rem;
23
+ max-width: 80%;
24
+ padding: 0.75rem 1rem;
25
+ margin-bottom: 0.5rem;
26
+ }
27
+
28
+ .ai-message {
29
+ background: #1e293b;
30
+ color: white;
31
+ margin-right: auto;
32
+ border-radius: 1rem 1rem 1rem 0;
33
+ max-width: 80%;
34
+ padding: 0.75rem 1rem;
35
+ margin-bottom: 0.5rem;
36
+ }
37
+
38
+ .mic-button.listening {
39
+ background-color: #ef4444 !important;
40
+ color: white !important;
41
+ animation: pulse 1.5s infinite;
42
+ }
43
+
44
+ @keyframes pulse {
45
+ 0% { opacity: 1; }
46
+ 50% { opacity: 0.7; }
47
+ 100% { opacity: 1; }
48
+ }
49
+
50
+ .file-info {
51
+ font-size: 0.75rem;
52
+ color: #94a3b8;
53
+ margin-top: 0.25rem;
54
  }
55
  custom-sidebar {
56
  width: 280px;