// IceYoo Desaru — Agent chat panel
// Slide-up overlay that talks to the customer-facing agent at /api/chat/sync.
// Phase 1: sync (non-streaming). Phase 1+ upgrades to SSE streaming.
//
// NOTE(review): this file is JSX, not plain JavaScript, and this copy is
// damaged — the markup between the parentheses below has been stripped
// (each `=> ( )` body is empty and the render return is truncated).
// Recover the original elements from version control before editing; do not
// treat the empty parentheses as intentional.

// React hooks pulled off the global React object (script-tag build, no bundler).
const { useState, useRef, useEffect } = React;

// ── config ────────────────────────────────────────────────────
// Endpoint may be overridden at runtime via window.__CHAT_API_URL__;
// falls back to the local dev backend.
const CHAT_API_URL = window.__CHAT_API_URL__ || "http://localhost:8002/api/chat/sync";
const RESTAURANT_NAME = "IceYoo Desaru";

// ── icons ─────────────────────────────────────────────────────
// NOTE(review): the SVG markup for all four icons is missing from this copy —
// presumably stripped during extraction. These bodies are not valid as-is.
const IconChat = ({ size = 22, color = "#fff" }) => ( );
const IconClose = ({ size = 22 }) => ( );
const IconSend = ({ size = 20 }) => ( );
const IconSpinner = ({ size = 18 }) => ( );

// ── chat bubble ───────────────────────────────────────────────
// Renders one message. `role` selects user vs. assistant presentation,
// `tools` (optional string[]) lists called-tool names rendered as chips
// (the "mcp__server__tool" prefix is shortened to "server.tool"), and
// `error` marks failure bubbles.
// NOTE(review): `isUser` is unused in the surviving fragment — it was
// presumably consumed by the stripped JSX. The return body below is truncated.
const ChatBubble = ({ role, text, tools, error }) => {
  const isUser = role === "user";
  return (
{text} {tools && tools.length > 0 && (
{tools.map((t, i) => ( {t.replace(/^mcp__/, "").replace(/__/, ".")} ))}
)}
); };

// ── chat panel (slide up from bottom) ─────────────────────────
// Props: `open` (boolean, controls visibility/focus) and `onClose` (callback).
// NOTE(review): the component's return JSX continues past the end of this copy.
function ChatPanel({ open, onClose }) {
  // Conversation transcript; seeded with a greeting from the assistant.
  const [messages, setMessages] = useState([
    {
      role: "assistant",
      text:
        "Hey! Welcome to " + RESTAURANT_NAME +
        ". I can help you order from our menu — Iceyoo, Bingsu, Korean chicken, or smoothies. What sounds good?",
    },
  ]);
  const [input, setInput] = useState("");
  const [loading, setLoading] = useState(false);
  // Session id persists across page reloads via localStorage; null until the
  // backend assigns one.
  const [sessionId, setSessionId] = useState(
    () => window.localStorage.getItem("iceyoo_session_id") || null
  );

  // ── VIP demo toggle ────────────────────────────────────────
  // When ON, sends customer_id: "cust_sarah_001" so the agent loads
  // her profile from MemGC (after running scripts/seed-memgc-sarah.ts).
  // Defaults to ON so the demo opens with Sarah's profile pre-loaded.
  const [isVip, setIsVip] = useState(() => {
    const v = window.localStorage.getItem("iceyoo_is_vip");
    return v === null ? true : v === "1";
  });
  const customerId = isVip ? "cust_sarah_001" : null;

  const scrollerRef = useRef(null); // message list container (for auto-scroll)
  const inputRef = useRef(null);    // text input (for focus-on-open)

  // auto-scroll on new message
  useEffect(() => {
    if (scrollerRef.current) {
      scrollerRef.current.scrollTop = scrollerRef.current.scrollHeight;
    }
  }, [messages, loading]);

  // focus input on open
  useEffect(() => {
    if (open && inputRef.current) {
      // Delay until the slide-up animation has (presumably) finished —
      // TODO confirm the 200ms matches the panel's CSS transition.
      setTimeout(() => inputRef.current && inputRef.current.focus(), 200);
    }
  }, [open]);

  // Send the current input to the agent and stream the reply into the
  // trailing assistant bubble.
  const send = async () => {
    const text = input.trim();
    if (!text || loading) return; // ignore empty input and double-sends
    setInput("");
    setMessages((m) => [...m, { role: "user", text }]);
    // Insert empty assistant message that gets filled by streaming deltas.
    setMessages((m) => [...m, { role: "assistant", text: "", streaming: true, tools: [] }]);
    setLoading(true);
    // Switch from /chat/sync to /chat (SSE). Same payload, streamed response.
    // The sync endpoint ends in "/sync"; stripping that suffix yields the
    // streaming (SSE) endpoint, which accepts the same request payload.
    const STREAM_URL = CHAT_API_URL.replace(/\/sync$/, "");
    try {
      const res = await fetch(STREAM_URL, {
        method: "POST",
        headers: { "Content-Type": "application/json", Accept: "text/event-stream" },
        body: JSON.stringify({
          message: text,
          session_id: sessionId,   // null on the first message; server assigns one
          customer_id: customerId, // null unless the VIP demo toggle is on
          channel: "mobile",
        }),
      });
      if (!res.ok || !res.body) {
        // NOTE(review): errBody is read for diagnostics but never used.
        const errBody = await res.text().catch(() => "");
        // Replace the placeholder assistant bubble with an error bubble.
        setMessages((m) => {
          const next = [...m];
          next[next.length - 1] = {
            role: "assistant",
            error: true,
            text:
              "Sorry, the assistant is unreachable right now (HTTP " + res.status + "). " +
              "Make sure the backend is running: " +
              "`cd ai-feedme && make up && make dev`",
          };
          return next;
        });
        return; // the finally{} below still clears the loading flag
      }

      // Read SSE stream: event lines like `event: chunk` followed by `data: {...}`.
      const reader = res.body.getReader();
      const decoder = new TextDecoder();
      let buffer = "";            // carries a partial event across reads
      let currentEvent = "chunk"; // most recent `event:` field value
      let finalData = null;       // payload of the terminal `done` event
      while (true) {
        const { value, done } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        // Events are separated by a blank line; keep the trailing partial
        // event in `buffer` for the next read.
        const parts = buffer.split("\n\n");
        buffer = parts.pop() || "";
        for (const part of parts) {
          const lines = part.split("\n");
          for (const line of lines) {
            if (line.startsWith("event:")) currentEvent = line.slice(6).trim();
            else if (line.startsWith("data:")) {
              const raw = line.slice(5).trim();
              if (!raw) continue;
              try {
                const data = JSON.parse(raw);
                if (currentEvent === "chunk" && data.delta) {
                  // Append the delta to the trailing (streaming) assistant bubble.
                  setMessages((m) => {
                    const next = [...m];
                    const last = next[next.length - 1];
                    next[next.length - 1] = { ...last, text: (last.text || "") + data.delta };
                    return next;
                  });
                } else if (currentEvent === "done") {
                  finalData = data;
                } else if (currentEvent === "error") {
                  // Server-reported agent failure: overwrite the streaming bubble.
                  setMessages((m) => {
                    const next = [...m];
                    next[next.length - 1] = {
                      role: "assistant",
                      error: true,
                      text: "Agent error: " + (data.message || data.error || "unknown"),
                    };
                    return next;
                  });
                }
              } catch (e) {
                // ignore malformed SSE chunk
              }
            }
          }
        }
      }

      // Finalize: persist session id + tools metadata on the streamed message.
      // NOTE(review): if the stream ends without a `done` event, the last
      // bubble keeps streaming: true — confirm the backend always emits `done`.
      if (finalData) {
        if (finalData.session_id) {
          setSessionId(finalData.session_id);
          window.localStorage.setItem("iceyoo_session_id", finalData.session_id);
        }
        setMessages((m) => {
          const next = [...m];
          const last = next[next.length - 1];
          next[next.length - 1] = {
            ...last,
            streaming: false,
            // If no chunks arrived (very short reply lost?), fall back to final output.
            text: last.text || finalData.output || "(empty response)",
            tools: finalData.tools_called || [],
          };
          return next;
        });
      }
    } catch (err) {
      // fetch() itself rejected (network down, CORS, server not running).
      setMessages((m) => {
        const next = [...m];
        next[next.length - 1] = {
          role: "assistant",
          error: true,
          text:
            "Network error — couldn't reach the assistant at " + STREAM_URL + ". " +
            "Phase 1 ships this API; if it's not running yet, this is expected.",
        };
        return next;
      });
    } finally {
      setLoading(false);
    }
  };

  // Enter sends; Shift+Enter falls through (presumably inserts a newline —
  // TODO confirm against the stripped input element).
  const onKey = (e) => {
    if (e.key === "Enter" && !e.shiftKey) {
      e.preventDefault();
      send();
    }
  };

  return (
    {/* backdrop */}
    {/* NOTE(review): the JSX markup for the panel body is missing from this
        copy — only expression children and comments survived extraction, and
        the component's return runs past the end of the file. The fragments
        below are not renderable as-is; recover the original elements (panel
        container, header, VIP toggle, ChatBubble list, input row) from
        version control. */}
    {/* panel */}
    {/* header */}
    {RESTAURANT_NAME} Assistant
    {/* status line: "thinking…" while a request is in flight */}
    {loading ? "thinking…" : "online"}
    {/* VIP demo toggle */}
    {/* messages */}
    {/* presumably each m was rendered as a ChatBubble; the element is stripped */}
    {messages.map((m, i) => ( ))} {loading && (
    thinking…
    )}
    {/* input */}