A React Native app for the ultimate thinking partner.
import { useCallback, useRef } from 'react';
import { useChatStore } from '../stores/chatStore';
import { useAgentStore } from '../stores/agentStore';
import lettaApi from '../api/lettaApi';
import type { StreamingChunk, LettaMessage } from '../types/letta';

/**
 * Hook to handle streaming message sending
 */
export function useMessageStream() {
  const chatStore = useChatStore();
  const coAgent = useAgentStore((state) => state.coAgent);

  // Track last message ID to detect when a new message starts
  const lastMessageIdRef = useRef<string | null>(null);

  // Handle individual streaming chunks - ULTRA SIMPLE
  const handleStreamingChunk = useCallback((chunk: StreamingChunk) => {
    const chunkType = chunk.message_type;
    const chunkId = (chunk as any).id;

    // Skip non-content chunks
    if (chunkType === 'stop_reason' || chunkType === 'usage_statistics') {
      return;
    }

    // Handle errors
    if ((chunk as any).error) {
      console.error('❌ Stream error:', (chunk as any).error);
      return;
    }

    console.log(`📦 [${chunkType}] ID: ${chunkId?.substring(0, 8)}...`);

    // DETECT NEW MESSAGE: If we see a different ID on reasoning OR tool_call, finalize current.
    // This handles both reasoning → tool_call and tool_call → reasoning transitions.
    if ((chunkType === 'reasoning_message' || chunkType === 'tool_call_message') && chunkId) {
      if (lastMessageIdRef.current && chunkId !== lastMessageIdRef.current) {
        console.log('🔄 NEW MESSAGE DETECTED - finalizing previous');
        chatStore.finalizeCurrentMessage();
      }
      lastMessageIdRef.current = chunkId;
    }

    // ACCUMULATE BASED ON TYPE
    if (chunkType === 'reasoning_message' && chunk.reasoning && chunkId) {
      chatStore.accumulateReasoning(chunkId, chunk.reasoning);
    }
    else if (chunkType === 'tool_call_message' && chunkId) {
      // SDK v1.0: tool_calls is now an array
      const toolCalls = (chunk as any).tool_calls || [(chunk as any).toolCall || (chunk as any).tool_call].filter(Boolean);
      if (toolCalls.length > 0) {
        const toolCall = toolCalls[0];
        const toolName = toolCall.name || toolCall.tool_name || 'unknown';
        // Try multiple places for arguments
        let args = toolCall.arguments || toolCall.args || '';

        // If args is an object, format it as a string
        if (typeof args === 'object' && args !== null) {
          args = JSON.stringify(args);
        }

        console.log('🔧 Tool call:', toolName, 'args:', args);
        chatStore.accumulateToolCall(chunkId, toolName, args);
      }
    }
    else if (chunkType === 'assistant_message' && chunkId) {
      let contentText = '';
      const content = chunk.content as any;

      if (typeof content === 'string') {
        contentText = content;
      } else if (Array.isArray(content)) {
        contentText = content
          .filter((item: any) => item.type === 'text')
          .map((item: any) => item.text || '')
          .join('');
      } else if (content?.text) {
        contentText = content.text;
      }

      if (contentText) {
        chatStore.accumulateAssistant(chunkId, contentText);
      }
    }
    // tool_return_message - just log, we'll handle pairing later
    else if (chunkType === 'tool_return_message') {
      console.log('📨 Tool return received');
    }
  }, [chatStore]);

  // Send a message with streaming
  const sendMessage = useCallback(
    async (messageText: string, imagesToSend: Array<{ uri: string; base64: string; mediaType: string }>) => {
      if ((!messageText.trim() && imagesToSend.length === 0) || !coAgent || chatStore.isSendingMessage) {
        return;
      }

      console.log('sendMessage called - messageText:', messageText, 'imagesToSend length:', imagesToSend.length);

      chatStore.setSendingMessage(true);

      // Immediately add user message to UI
      let tempMessageContent: any;
      if (imagesToSend.length > 0) {
        const contentParts = [];

        // Always add text part first (even if empty) when images present
        contentParts.push({
          type: 'text',
          text: messageText || '',
        });

        // Add images after text
        for (const img of imagesToSend) {
          contentParts.push({
            type: 'image',
            source: {
              type: 'base64',
              mediaType: img.mediaType,
              data: img.base64,
            },
          });
        }

        tempMessageContent = contentParts;
      } else {
        tempMessageContent = messageText;
      }

      const tempUserMessage: LettaMessage = {
        id: `temp-${Date.now()}`,
        role: 'user',
        message_type: 'user_message',
        content: tempMessageContent,
        created_at: new Date().toISOString(),
      } as LettaMessage;

      chatStore.addMessage(tempUserMessage);

      try {
        chatStore.startStreaming();
        lastMessageIdRef.current = null; // Reset for new stream

        // Build message content
        let messageContent: any;
        if (imagesToSend.length > 0) {
          const contentParts = [];

          // Always add text part first (even if empty) when images present
          contentParts.push({
            type: 'text',
            text: messageText || '',
          });

          for (const img of imagesToSend) {
            contentParts.push({
              type: 'image',
              source: {
                type: 'base64',
                mediaType: img.mediaType,
                data: img.base64,
              },
            });
          }

          messageContent = contentParts;
        } else {
          messageContent = messageText;
        }

        const payload = {
          messages: [{ role: 'user', content: messageContent }],
          use_assistant_message: true,
          stream_tokens: true,
        };

        await lettaApi.sendMessageStream(
          coAgent.id,
          payload,
          (chunk: StreamingChunk) => {
            handleStreamingChunk(chunk);
          },
          async (response) => {
            console.log('🎬 STREAM COMPLETE');

            // Finalize the last message
            chatStore.finalizeCurrentMessage();

            // Get all completed messages
            const { currentStreamingMessage, completedStreamingMessages } = useChatStore.getState();

            const allStreamedMessages = [...completedStreamingMessages];
            if (currentStreamingMessage) {
              allStreamedMessages.push(currentStreamingMessage);
            }

            console.log('📨 Converting', allStreamedMessages.length, 'streamed messages to permanent messages');

            // Convert to LettaMessage format and add to messages
            const permanentMessages: LettaMessage[] = allStreamedMessages.map((msg, idx) => {
              // Format tool call content as Python-style string (like server does)
              let content = msg.content;
              if (msg.type === 'tool_call' && msg.toolCallName) {
                const { formatToolCall } = require('../utils/formatToolCall');
                content = formatToolCall(msg.toolCallName, msg.content);
              }

              return {
                id: msg.id,
                role: 'assistant',
                message_type: msg.type === 'tool_call' ? 'tool_call_message' : 'assistant_message',
                content: content,
                reasoning: msg.reasoning,
                ...(msg.type === 'tool_call' && msg.toolCallName ? {
                  tool_calls: [{
                    name: msg.toolCallName,
                    arguments: msg.content, // Keep as JSON for parseToolCall fallback
                  }]
                } : {}),
                created_at: msg.timestamp,
              } as any;
            });

            // Add to messages array
            if (permanentMessages.length > 0) {
              chatStore.addMessages(permanentMessages);
            }

            // Clear streaming state
            chatStore.clearAllStreamingState();
            chatStore.stopStreaming();
            chatStore.setSendingMessage(false);
            chatStore.clearImages();

            console.log('✅ Stream finished and converted to messages');
          },
          (error) => {
            console.error('Stream error:', error);
            chatStore.clearAllStreamingState();
            chatStore.stopStreaming();
            chatStore.setSendingMessage(false);
          }
        );
      } catch (error) {
        console.error('Failed to send message:', error);
        chatStore.clearAllStreamingState();
        chatStore.stopStreaming();
        chatStore.setSendingMessage(false);
        throw error;
      }
    },
    [coAgent, chatStore, handleStreamingChunk]
  );

  return {
    isStreaming: chatStore.isStreaming,
    isSendingMessage: chatStore.isSendingMessage,
    sendMessage,
  };
}
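A minimal usage sketch of the hook from a composer component. The component name, the import path, and the local draft state are assumptions for illustration; the only calls taken from the file above are sendMessage(text, images) and the isSendingMessage flag.

// Hypothetical composer showing how useMessageStream might be consumed.
import React, { useState } from 'react';
import { View, TextInput, Button } from 'react-native';
import { useMessageStream } from '../hooks/useMessageStream'; // assumed path

export function ChatComposer() {
  const { sendMessage, isSendingMessage } = useMessageStream();
  const [draft, setDraft] = useState('');

  const onSend = async () => {
    try {
      // No images attached in this sketch, so pass an empty array.
      await sendMessage(draft, []);
      setDraft('');
    } catch (e) {
      // sendMessage re-throws after resetting streaming state.
      console.warn('Send failed:', e);
    }
  };

  return (
    <View>
      <TextInput value={draft} onChangeText={setDraft} placeholder="Message" />
      <Button title="Send" onPress={onSend} disabled={isSendingMessage} />
    </View>
  );
}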