diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx
index 981dc9d0e58..5f1ba55808d 100644
--- a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx
@@ -445,6 +445,7 @@ export function UserInput({
sttPrefixRef.current = ''
resetTranscript()
currentFiles.clearAttachedFiles()
+ prevSelectedContextsRef.current = []
currentContext.clearContexts()
if (textareaRef.current) {
diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/user-message-content/user-message-content.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/user-message-content/user-message-content.tsx
index 3778f8fd1cf..d4a3ea7c7c1 100644
--- a/apps/sim/app/workspace/[workspaceId]/home/components/user-message-content/user-message-content.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/home/components/user-message-content/user-message-content.tsx
@@ -65,14 +65,16 @@ function MentionHighlight({ context }: { context: ChatMessageContext }) {
}
export function UserMessageContent({ content, contexts }: UserMessageContentProps) {
+ const trimmed = content.trim()
+
if (!contexts || contexts.length === 0) {
- return
{content}
+ return
{trimmed}
}
const ranges = computeMentionRanges(content, contexts)
if (ranges.length === 0) {
- return
{content}
+ return
{trimmed}
}
const elements: React.ReactNode[] = []
diff --git a/apps/sim/app/workspace/[workspaceId]/home/home.tsx b/apps/sim/app/workspace/[workspaceId]/home/home.tsx
index 132d87b2a9e..506e48a2196 100644
--- a/apps/sim/app/workspace/[workspaceId]/home/home.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/home/home.tsx
@@ -17,7 +17,7 @@ import { useChatHistory, useMarkTaskRead } from '@/hooks/queries/tasks'
import type { ChatContext } from '@/stores/panel'
import { MothershipChat, MothershipView, TemplatePrompts, UserInput } from './components'
import { getMothershipUseChatOptions, useChat, useMothershipResize } from './hooks'
-import type { FileAttachmentForApi, MothershipResourceType } from './types'
+import type { FileAttachmentForApi, MothershipResource, MothershipResourceType } from './types'
const logger = createLogger('Home')
@@ -115,7 +115,7 @@ export function Home({ chatId }: HomeProps = {}) {
const wasSendingRef = useRef(false)
- useChatHistory(chatId)
+ const { isPending: isChatHistoryPending } = useChatHistory(chatId)
const { mutate: markRead } = useMarkTaskRead(workspaceId)
const { mothershipRef, handleResizePointerDown, clearWidth } = useMothershipResize()
@@ -157,7 +157,7 @@ export function Home({ chatId }: HomeProps = {}) {
removeFromQueue,
sendNow,
editQueuedMessage,
- streamingFile,
+ previewSession,
genericResourceData,
} = useChat(
workspaceId,
@@ -228,7 +228,7 @@ export function Home({ chatId }: HomeProps = {}) {
workspace_id: workspaceId,
view: 'mothership',
})
- stopGeneration()
+ void stopGeneration().catch(() => {})
}, [stopGeneration, workspaceId])
const handleSubmit = useCallback(
@@ -299,7 +299,19 @@ export function Home({ chatId }: HomeProps = {}) {
[resolveResourceFromContext, removeResource]
)
+ const handleWorkspaceResourceSelect = useCallback(
+ (resource: MothershipResource) => {
+ const wasAdded = addResource(resource)
+ if (!wasAdded) {
+ setActiveResourceId(resource.id)
+ }
+ handleResourceEvent()
+ },
+ [addResource, handleResourceEvent, setActiveResourceId]
+ )
+
const hasMessages = messages.length > 0
+ const showChatSkeleton = Boolean(chatId) && !hasMessages && isChatHistoryPending
useEffect(() => {
if (hasMessages) return
@@ -358,6 +370,7 @@ export function Home({ chatId }: HomeProps = {}) {
messages={messages}
isSending={isSending}
isReconnecting={isReconnecting}
+ isLoading={showChatSkeleton}
onSubmit={handleSubmit}
onStopGeneration={handleStopGeneration}
messageQueue={messageQueue}
@@ -368,6 +381,7 @@ export function Home({ chatId }: HomeProps = {}) {
chatId={resolvedChatId}
onContextAdd={handleContextAdd}
onContextRemove={handleContextRemove}
+ onWorkspaceResourceSelect={handleWorkspaceResourceSelect}
editValue={editingInputValue}
onEditValueConsumed={clearEditingValue}
animateInput={isInputEntering}
@@ -401,8 +415,8 @@ export function Home({ chatId }: HomeProps = {}) {
onReorderResources={reorderResources}
onCollapse={collapseResource}
isCollapsed={isResourceCollapsed}
- streamingFile={streamingFile}
- genericResourceData={genericResourceData}
+ previewSession={previewSession}
+ genericResourceData={genericResourceData ?? undefined}
className={skipResourceTransition ? '!transition-none' : undefined}
/>
diff --git a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts
index ac5bf1ab167..89cdf1fe80e 100644
--- a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts
+++ b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts
@@ -2,66 +2,118 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { usePathname } from 'next/navigation'
+import { toDisplayMessage } from '@/lib/copilot/chat/display-message'
+import type {
+ PersistedFileAttachment,
+ PersistedMessage,
+} from '@/lib/copilot/chat/persisted-message'
+import { COPILOT_CHAT_API_PATH, MOTHERSHIP_CHAT_API_PATH } from '@/lib/copilot/constants'
+import type { MothershipStreamV1EventEnvelope } from '@/lib/copilot/generated/mothership-stream-v1'
import {
- cancelRunToolExecution,
- executeRunToolOnClient,
- markRunToolManuallyStopped,
- reportManualRunToolStop,
-} from '@/lib/copilot/client-sse/run-tool-execution'
+ MothershipStreamV1EventType,
+ MothershipStreamV1ResourceOp,
+ MothershipStreamV1RunKind,
+ MothershipStreamV1SessionKind,
+ MothershipStreamV1SpanLifecycleEvent,
+ MothershipStreamV1SpanPayloadKind,
+ MothershipStreamV1ToolOutcome,
+ MothershipStreamV1ToolPhase,
+} from '@/lib/copilot/generated/mothership-stream-v1'
import {
- COPILOT_CHAT_API_PATH,
- COPILOT_CHAT_STREAM_API_PATH,
- MOTHERSHIP_CHAT_API_PATH,
-} from '@/lib/copilot/constants'
+ CrawlWebsite,
+ CreateFolder,
+ DeleteFolder,
+ DeleteWorkflow,
+ DeployApi,
+ DeployChat,
+ DeployMcp,
+ GetPageContents,
+ GetWorkflowLogs,
+ Glob,
+ Grep,
+ ManageCredential,
+ ManageCredentialOperation,
+ ManageCustomTool,
+ ManageCustomToolOperation,
+ ManageJob,
+ ManageJobOperation,
+ ManageMcpTool,
+ ManageMcpToolOperation,
+ ManageSkill,
+ ManageSkillOperation,
+ MoveFolder,
+ MoveWorkflow,
+ Read as ReadTool,
+ Redeploy,
+ RenameWorkflow,
+ RunFromBlock,
+ RunWorkflow,
+ RunWorkflowUntilBlock,
+ ScrapePage,
+ SearchOnline,
+ ToolSearchToolRegex,
+ WorkspaceFile,
+ WorkspaceFileOperation,
+} from '@/lib/copilot/generated/tool-catalog-v1'
+import type { FilePreviewSession } from '@/lib/copilot/request/session'
+import type { StreamBatchEvent } from '@/lib/copilot/request/session/types'
import {
extractResourcesFromToolResult,
- isEphemeralResource,
isResourceToolName,
-} from '@/lib/copilot/resource-extraction'
-import { VFS_DIR_TO_RESOURCE } from '@/lib/copilot/resource-types'
-import { isWorkflowToolName } from '@/lib/copilot/workflow-tools'
+} from '@/lib/copilot/resources/extraction'
+import { VFS_DIR_TO_RESOURCE } from '@/lib/copilot/resources/types'
+import { isToolHiddenInUi } from '@/lib/copilot/tools/client/hidden-tools'
+import {
+ cancelRunToolExecution,
+ executeRunToolOnClient,
+ isRunToolActiveForId,
+ markRunToolManuallyStopped,
+ reportManualRunToolStop,
+} from '@/lib/copilot/tools/client/run-tool-execution'
+import { isWorkflowToolName } from '@/lib/copilot/tools/workflow-tools'
import { generateId } from '@/lib/core/utils/uuid'
import { getNextWorkflowColor } from '@/lib/workflows/colors'
import { getQueryClient } from '@/app/_shell/providers/get-query-client'
import { invalidateResourceQueries } from '@/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-registry'
-import type {
- ChatMessage,
- ChatMessageAttachment,
- ContentBlock,
- ContentBlockType,
- FileAttachmentForApi,
- GenericResourceData,
- GenericResourceEntry,
- MothershipResource,
- MothershipResourceType,
- QueuedMessage,
- SSEPayload,
- SSEPayloadData,
- ToolCallStatus,
-} from '@/app/workspace/[workspaceId]/home/types'
+import {
+ buildCompletedPreviewSessions,
+ type FilePreviewSessionsState,
+ INITIAL_FILE_PREVIEW_SESSIONS_STATE,
+ reduceFilePreviewSessions,
+ useFilePreviewSessions,
+} from '@/app/workspace/[workspaceId]/home/hooks/use-file-preview-sessions'
import { deploymentKeys } from '@/hooks/queries/deployments'
import {
fetchChatHistory,
- type StreamSnapshot,
type TaskChatHistory,
- type TaskStoredContentBlock,
- type TaskStoredFileAttachment,
- type TaskStoredMessage,
- type TaskStoredToolCall,
taskKeys,
useChatHistory,
} from '@/hooks/queries/tasks'
import { getFolderMap } from '@/hooks/queries/utils/folder-cache'
+import { folderKeys } from '@/hooks/queries/utils/folder-keys'
import { invalidateWorkflowSelectors } from '@/hooks/queries/utils/invalidate-workflow-lists'
import { getTopInsertionSortOrder } from '@/hooks/queries/utils/top-insertion-sort-order'
import { getWorkflowById, getWorkflows } from '@/hooks/queries/utils/workflow-cache'
import { workflowKeys } from '@/hooks/queries/workflows'
+import { workspaceFilesKeys } from '@/hooks/queries/workspace-files'
import { useExecutionStream } from '@/hooks/use-execution-stream'
import { useExecutionStore } from '@/stores/execution/store'
import type { ChatContext } from '@/stores/panel'
-import { consolePersistence, useTerminalConsoleStore } from '@/stores/terminal'
+import { useTerminalConsoleStore } from '@/stores/terminal'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
+import { useWorkflowStore } from '@/stores/workflows/workflow/store'
+import type {
+ ChatMessage,
+ ContentBlock,
+ FileAttachmentForApi,
+ GenericResourceData,
+ MothershipResource,
+ MothershipResourceType,
+ QueuedMessage,
+} from '../types'
+
+const FILE_SUBAGENT_ID = 'file'
export interface UseChatReturn {
messages: ChatMessage[]
@@ -85,226 +137,429 @@ export interface UseChatReturn {
removeFromQueue: (id: string) => void
sendNow: (id: string) => Promise
editQueuedMessage: (id: string) => QueuedMessage | undefined
- streamingFile: { fileName: string; content: string } | null
- genericResourceData: GenericResourceData
+ previewSession: FilePreviewSession | null
+ genericResourceData: GenericResourceData | null
}
-const STATE_TO_STATUS: Record = {
- success: 'success',
- error: 'error',
- cancelled: 'cancelled',
- rejected: 'error',
- skipped: 'success',
-} as const
+const DEPLOY_TOOL_NAMES: Set = new Set([
+ DeployApi.id,
+ DeployChat.id,
+ DeployMcp.id,
+ Redeploy.id,
+])
-const DEPLOY_TOOL_NAMES = new Set(['deploy_api', 'deploy_chat', 'deploy_mcp', 'redeploy'])
+const FOLDER_TOOL_NAMES: Set = new Set([CreateFolder.id, DeleteFolder.id, MoveFolder.id])
+
+const WORKFLOW_MUTATION_TOOL_NAMES: Set = new Set([
+ MoveWorkflow.id,
+ RenameWorkflow.id,
+ DeleteWorkflow.id,
+])
const RECONNECT_TAIL_ERROR =
'Live reconnect failed before the stream finished. The latest response may be incomplete.'
-const TERMINAL_STREAM_STATUSES = new Set(['complete', 'error', 'cancelled'])
const MAX_RECONNECT_ATTEMPTS = 10
const RECONNECT_BASE_DELAY_MS = 1000
const RECONNECT_MAX_DELAY_MS = 30_000
-interface StreamEventEnvelope {
- eventId: number
- streamId: string
- event: Record
-}
+const logger = createLogger('useChat')
-interface StreamBatchResponse {
- success: boolean
- events: StreamEventEnvelope[]
- status: string
-}
+type StreamPayload = Record
-interface StreamTerminationResult {
- sawStreamError: boolean
- sawDoneEvent: boolean
- lastEventId: number
+function stringParam(value: unknown): string | undefined {
+ return typeof value === 'string' && value.trim() ? value.trim() : undefined
}
-interface StreamProcessingOptions {
- expectedGen?: number
- initialLastEventId?: number
- preserveExistingState?: boolean
+function stringArrayParam(value: unknown): string[] {
+ if (!Array.isArray(value)) return []
+ return value.filter((item): item is string => typeof item === 'string' && item.trim().length > 0)
}
-interface AttachToStreamOptions {
- streamId: string
- assistantId: string
- expectedGen: number
- snapshot?: StreamSnapshot | null
- initialLastEventId?: number
+function resolveWorkflowNameForDisplay(workflowId: unknown): string | undefined {
+ const id = stringParam(workflowId)
+ if (!id) return undefined
+ const workspaceId = useWorkflowRegistry.getState().hydration.workspaceId
+ if (!workspaceId) return undefined
+ return getWorkflowById(workspaceId, id)?.name
}
-interface AttachToStreamResult {
- aborted: boolean
- error: boolean
+function resolveBlockNameForDisplay(blockId: unknown): string | undefined {
+ const id = stringParam(blockId)
+ if (!id) return undefined
+ return useWorkflowStore.getState().blocks[id]?.name
}
-interface PendingStreamRecovery {
- streamId: string
- snapshot?: StreamSnapshot | null
-}
+function resolveWorkspaceFileDisplayTitle(
+ operation: unknown,
+ title: unknown,
+ targetFileName?: unknown
+): string | undefined {
+ const chunkTitle = stringParam(title)
+ const fileName = stringParam(targetFileName)
+ let verb = 'Writing'
+
+ switch (operation) {
+ case WorkspaceFileOperation.append:
+ verb = 'Adding'
+ break
+ case WorkspaceFileOperation.patch:
+ verb = 'Editing'
+ break
+ case WorkspaceFileOperation.update:
+ verb = 'Writing'
+ break
+ }
-function isTerminalStreamStatus(status?: string | null): boolean {
- return Boolean(status && TERMINAL_STREAM_STATUSES.has(status))
+ if (chunkTitle) return `${verb} ${chunkTitle}`
+ if (fileName) return `${verb} ${fileName}`
+ return undefined
}
-function isActiveStreamConflictError(input: unknown): boolean {
- if (typeof input !== 'string') return false
- return input.includes('A response is already in progress for this chat')
+function resolveOperationDisplayTitle(
+ operation: unknown,
+ labels: Partial>,
+ fallback: string
+): string {
+ const label = typeof operation === 'string' ? labels[operation] : undefined
+ return label ?? fallback
}
-/**
- * Extracts tool call IDs from snapshot events so that replayed client-executable
- * tool calls are not re-executed after a page refresh.
- */
-function extractToolCallIdsFromSnapshot(snapshot?: StreamSnapshot | null): Set {
- const ids = new Set()
- if (!snapshot?.events) return ids
- for (const entry of snapshot.events) {
- const event = entry.event
- if (event.type === 'tool_call' && typeof event.toolCallId === 'string') {
- ids.add(event.toolCallId)
- }
+function resolveToolDisplayTitle(name: string, args?: Record): string | undefined {
+ if (!args) return undefined
+
+ if (name === WorkspaceFile.id) {
+ const target = asPayloadRecord(args.target)
+ return resolveWorkspaceFileDisplayTitle(args.operation, args.title, target?.fileName)
+ }
+
+ if (name === SearchOnline.id) {
+ const toolTitle = stringParam(args.toolTitle)
+ return toolTitle ? `Searching online for ${toolTitle}` : 'Searching online'
+ }
+
+ if (name === Grep.id) {
+ const toolTitle = stringParam(args.toolTitle)
+ return toolTitle ? `Searching for ${toolTitle}` : 'Searching'
+ }
+
+ if (name === Glob.id) {
+ const toolTitle = stringParam(args.toolTitle)
+ return toolTitle ? `Finding ${toolTitle}` : 'Finding files'
+ }
+
+ if (name === ScrapePage.id) {
+ const url = stringParam(args.url)
+ return url ? `Scraping ${url}` : 'Scraping page'
+ }
+
+ if (name === CrawlWebsite.id) {
+ const url = stringParam(args.url)
+ return url ? `Crawling ${url}` : 'Crawling website'
+ }
+
+ if (name === GetPageContents.id) {
+ const urls = stringArrayParam(args.urls)
+ if (urls.length === 1) return `Getting ${urls[0]}`
+ if (urls.length > 1) return `Getting ${urls.length} pages`
+ return 'Getting page contents'
+ }
+
+ if (name === ManageCustomTool.id) {
+ return resolveOperationDisplayTitle(
+ args.operation,
+ {
+ [ManageCustomToolOperation.add]: 'Creating custom tool',
+ [ManageCustomToolOperation.edit]: 'Updating custom tool',
+ [ManageCustomToolOperation.delete]: 'Deleting custom tool',
+ [ManageCustomToolOperation.list]: 'Listing custom tools',
+ },
+ 'Custom tool action'
+ )
+ }
+
+ if (name === ManageMcpTool.id) {
+ return resolveOperationDisplayTitle(
+ args.operation,
+ {
+ [ManageMcpToolOperation.add]: 'Creating MCP server',
+ [ManageMcpToolOperation.edit]: 'Updating MCP server',
+ [ManageMcpToolOperation.delete]: 'Deleting MCP server',
+ [ManageMcpToolOperation.list]: 'Listing MCP servers',
+ },
+ 'MCP server action'
+ )
+ }
+
+ if (name === ManageSkill.id) {
+ return resolveOperationDisplayTitle(
+ args.operation,
+ {
+ [ManageSkillOperation.add]: 'Creating skill',
+ [ManageSkillOperation.edit]: 'Updating skill',
+ [ManageSkillOperation.delete]: 'Deleting skill',
+ [ManageSkillOperation.list]: 'Listing skills',
+ },
+ 'Skill action'
+ )
+ }
+
+ if (name === ManageJob.id) {
+ return resolveOperationDisplayTitle(
+ args.operation,
+ {
+ [ManageJobOperation.create]: 'Creating job',
+ [ManageJobOperation.get]: 'Getting job',
+ [ManageJobOperation.update]: 'Updating job',
+ [ManageJobOperation.delete]: 'Deleting job',
+ [ManageJobOperation.list]: 'Listing jobs',
+ },
+ 'Job action'
+ )
+ }
+
+ if (name === ManageCredential.id) {
+ return resolveOperationDisplayTitle(
+ args.operation,
+ {
+ [ManageCredentialOperation.rename]: 'Renaming credential',
+ [ManageCredentialOperation.delete]: 'Deleting credential',
+ },
+ 'Credential action'
+ )
+ }
+
+ if (name === RunWorkflow.id) {
+ const workflowName = resolveWorkflowNameForDisplay(args.workflowId)
+ return workflowName ? `Running ${workflowName}` : 'Running workflow'
+ }
+
+ if (name === RunFromBlock.id) {
+ const workflowName = resolveWorkflowNameForDisplay(args.workflowId)
+ const blockName = resolveBlockNameForDisplay(args.startBlockId)
+ if (workflowName && blockName) return `Running ${workflowName} from ${blockName}`
+ if (workflowName) return `Running ${workflowName}`
+ if (blockName) return `Running from ${blockName}`
+ return 'Running workflow'
+ }
+
+ if (name === RunWorkflowUntilBlock.id) {
+ const workflowName = resolveWorkflowNameForDisplay(args.workflowId)
+ const blockName = resolveBlockNameForDisplay(args.stopAfterBlockId)
+ if (workflowName && blockName) return `Running ${workflowName} until ${blockName}`
+ if (workflowName) return `Running ${workflowName}`
+ if (blockName) return `Running until ${blockName}`
+ return 'Running workflow'
}
- return ids
+
+ if (name === GetWorkflowLogs.id) {
+ const workflowName = resolveWorkflowNameForDisplay(args.workflowId)
+ return workflowName ? `Getting logs for ${workflowName}` : 'Getting logs'
+ }
+
+ return undefined
}
-function buildReplayStream(events: StreamEventEnvelope[]): ReadableStream {
- const encoder = new TextEncoder()
- return new ReadableStream({
- start(controller) {
- if (events.length > 0) {
- const payload = events
- .map(
- (entry) =>
- `data: ${JSON.stringify({ ...entry.event, eventId: entry.eventId, streamId: entry.streamId })}\n\n`
- )
- .join('')
- controller.enqueue(encoder.encode(payload))
- }
- controller.close()
- },
- })
+function decodeStreamingString(value: string): string {
+ return value
+ .replace(/\\u([0-9a-fA-F]{4})/g, (_: string, hex: string) =>
+ String.fromCharCode(Number.parseInt(hex, 16))
+ )
+ .replace(/\\"/g, '"')
+ .replace(/\\\\/g, '\\')
}
-function mapStoredBlock(block: TaskStoredContentBlock): ContentBlock {
- if (block.type === 'thinking') {
- return {
- type: 'text',
- content: block.content ? `${block.content}` : '',
- }
+function matchStreamingStringArg(streamingArgs: string, key: string): string | undefined {
+ const match = streamingArgs.match(new RegExp(`"${key}"\\s*:\\s*"([^"]*)"`, 'm'))
+ return match?.[1] ? decodeStreamingString(match[1]) : undefined
+}
+
+function resolveStreamingToolDisplayTitle(name: string, streamingArgs: string): string | undefined {
+ if (name === WorkspaceFile.id) {
+ return resolveWorkspaceFileDisplayTitle(
+ matchStreamingStringArg(streamingArgs, 'operation'),
+ matchStreamingStringArg(streamingArgs, 'title'),
+ matchStreamingStringArg(streamingArgs, 'fileName')
+ )
}
- const mapped: ContentBlock = {
- type: block.type as ContentBlockType,
- content: block.content,
+ if (name === SearchOnline.id) {
+ const toolTitle = matchStreamingStringArg(streamingArgs, 'toolTitle')
+ return toolTitle ? `Searching online for ${toolTitle}` : undefined
}
- if (block.type === 'tool_call' && block.toolCall) {
- const resolvedStatus = STATE_TO_STATUS[block.toolCall.state ?? ''] ?? 'error'
- mapped.toolCall = {
- id: block.toolCall.id ?? '',
- name: block.toolCall.name ?? 'unknown',
- status: resolvedStatus,
- displayTitle:
- resolvedStatus === 'cancelled' ? 'Stopped by user' : block.toolCall.display?.text,
- params: block.toolCall.params,
- calledBy: block.toolCall.calledBy,
- result: block.toolCall.result,
- }
+ if (name === Grep.id) {
+ const toolTitle = matchStreamingStringArg(streamingArgs, 'toolTitle')
+ return toolTitle ? `Searching for ${toolTitle}` : undefined
}
- return mapped
-}
+ if (name === Glob.id) {
+ const toolTitle = matchStreamingStringArg(streamingArgs, 'toolTitle')
+ return toolTitle ? `Finding ${toolTitle}` : undefined
+ }
-function mapStoredToolCall(tc: TaskStoredToolCall): ContentBlock {
- const resolvedStatus = (STATE_TO_STATUS[tc.status] ?? 'error') as ToolCallStatus
- return {
- type: 'tool_call',
- toolCall: {
- id: tc.id,
- name: tc.name,
- status: resolvedStatus,
- displayTitle: resolvedStatus === 'cancelled' ? 'Stopped by user' : undefined,
- params: tc.params,
- result:
- tc.result != null
- ? {
- success: tc.status === 'success',
- output: tc.result,
- error: tc.error,
- }
- : undefined,
- },
+ if (name === ScrapePage.id) {
+ const url = matchStreamingStringArg(streamingArgs, 'url')
+ return url ? `Scraping ${url}` : undefined
}
-}
-function toDisplayAttachment(f: TaskStoredFileAttachment): ChatMessageAttachment {
- return {
- id: f.id,
- filename: f.filename,
- media_type: f.media_type,
- size: f.size,
- previewUrl: f.media_type.startsWith('image/')
- ? `/api/files/serve/${encodeURIComponent(f.key)}?context=mothership`
- : undefined,
+ if (name === CrawlWebsite.id) {
+ const url = matchStreamingStringArg(streamingArgs, 'url')
+ return url ? `Crawling ${url}` : undefined
}
-}
-function mapStoredMessage(msg: TaskStoredMessage): ChatMessage {
- const mapped: ChatMessage = {
- id: msg.id,
- role: msg.role,
- content: msg.content,
- ...(msg.requestId ? { requestId: msg.requestId } : {}),
+ if (name === ManageCustomTool.id) {
+ return resolveOperationDisplayTitle(
+ matchStreamingStringArg(streamingArgs, 'operation'),
+ {
+ [ManageCustomToolOperation.add]: 'Creating custom tool',
+ [ManageCustomToolOperation.edit]: 'Updating custom tool',
+ [ManageCustomToolOperation.delete]: 'Deleting custom tool',
+ [ManageCustomToolOperation.list]: 'Listing custom tools',
+ },
+ 'Custom tool action'
+ )
}
- const hasContentBlocks = Array.isArray(msg.contentBlocks) && msg.contentBlocks.length > 0
- const hasToolCalls = Array.isArray(msg.toolCalls) && msg.toolCalls.length > 0
- const contentBlocksHaveTools =
- hasContentBlocks && msg.contentBlocks!.some((b) => b.type === 'tool_call')
+ if (name === ManageMcpTool.id) {
+ return resolveOperationDisplayTitle(
+ matchStreamingStringArg(streamingArgs, 'operation'),
+ {
+ [ManageMcpToolOperation.add]: 'Creating MCP server',
+ [ManageMcpToolOperation.edit]: 'Updating MCP server',
+ [ManageMcpToolOperation.delete]: 'Deleting MCP server',
+ [ManageMcpToolOperation.list]: 'Listing MCP servers',
+ },
+ 'MCP server action'
+ )
+ }
- if (hasContentBlocks && (!hasToolCalls || contentBlocksHaveTools)) {
- const blocks = msg.contentBlocks!.map(mapStoredBlock)
- const hasText = blocks.some((b) => b.type === 'text' && b.content?.trim())
- if (!hasText && msg.content?.trim()) {
- blocks.push({ type: 'text', content: msg.content })
- }
- mapped.contentBlocks = blocks
- } else if (hasToolCalls) {
- const blocks: ContentBlock[] = msg.toolCalls!.map(mapStoredToolCall)
- if (msg.content?.trim()) {
- blocks.push({ type: 'text', content: msg.content })
- }
- mapped.contentBlocks = blocks
+ if (name === ManageSkill.id) {
+ return resolveOperationDisplayTitle(
+ matchStreamingStringArg(streamingArgs, 'operation'),
+ {
+ [ManageSkillOperation.add]: 'Creating skill',
+ [ManageSkillOperation.edit]: 'Updating skill',
+ [ManageSkillOperation.delete]: 'Deleting skill',
+ [ManageSkillOperation.list]: 'Listing skills',
+ },
+ 'Skill action'
+ )
}
- if (Array.isArray(msg.fileAttachments) && msg.fileAttachments.length > 0) {
- mapped.attachments = msg.fileAttachments.map(toDisplayAttachment)
+ if (name === ManageJob.id) {
+ return resolveOperationDisplayTitle(
+ matchStreamingStringArg(streamingArgs, 'operation'),
+ {
+ [ManageJobOperation.create]: 'Creating job',
+ [ManageJobOperation.get]: 'Getting job',
+ [ManageJobOperation.update]: 'Updating job',
+ [ManageJobOperation.delete]: 'Deleting job',
+ [ManageJobOperation.list]: 'Listing jobs',
+ },
+ 'Job action'
+ )
}
- if (Array.isArray(msg.contexts) && msg.contexts.length > 0) {
- mapped.contexts = msg.contexts.map((c) => ({
- kind: c.kind,
- label: c.label,
- ...(c.workflowId && { workflowId: c.workflowId }),
- ...(c.knowledgeId && { knowledgeId: c.knowledgeId }),
- ...(c.tableId && { tableId: c.tableId }),
- ...(c.fileId && { fileId: c.fileId }),
- ...(c.folderId && { folderId: c.folderId }),
- }))
+ if (name === ManageCredential.id) {
+ return resolveOperationDisplayTitle(
+ matchStreamingStringArg(streamingArgs, 'operation'),
+ {
+ [ManageCredentialOperation.rename]: 'Renaming credential',
+ [ManageCredentialOperation.delete]: 'Deleting credential',
+ },
+ 'Credential action'
+ )
}
- return mapped
+ return undefined
}
-const logger = createLogger('useChat')
+type StreamToolUI = {
+ hidden?: boolean
+ title?: string
+ phaseLabel?: string
+ clientExecutable?: boolean
+}
+
+type StreamBatchResponse = {
+ success: boolean
+ events: StreamBatchEvent[]
+ previewSessions?: FilePreviewSession[]
+ status: string
+}
+
+function buildChatHistoryHydrationKey(chatHistory: TaskChatHistory): string {
+ const resourceKey = chatHistory.resources
+ .map((resource) => `${resource.type}:${resource.id}:${resource.title}`)
+ .join('|')
+ const messageKey = chatHistory.messages.map((message) => message.id).join('|')
+ const streamSnapshot = chatHistory.streamSnapshot
+ const snapshotKey = streamSnapshot
+ ? [
+ streamSnapshot.status,
+ streamSnapshot.events.length,
+ streamSnapshot.events[streamSnapshot.events.length - 1]?.eventId ?? '',
+ streamSnapshot.previewSessions
+ .map(
+ (session) =>
+ `${session.id}:${session.previewVersion}:${session.status}:${session.updatedAt}`
+ )
+ .join('|'),
+ ].join('~')
+ : 'none'
+
+ return [
+ chatHistory.id,
+ chatHistory.activeStreamId ?? '',
+ messageKey,
+ resourceKey,
+ snapshotKey,
+ ].join('::')
+}
+
+const TERMINAL_STREAM_STATUSES = new Set(['complete', 'error', 'cancelled'])
+
+function isTerminalStreamStatus(status: string | null | undefined): boolean {
+ return TERMINAL_STREAM_STATUSES.has(status ?? '')
+}
+
+const sseEncoder = new TextEncoder()
+function buildReplayStream(events: StreamBatchEvent[]): ReadableStream {
+ return new ReadableStream({
+ start(controller) {
+ const payload = events
+ .map(
+ (entry) =>
+ `data: ${JSON.stringify({ ...entry.event, eventId: entry.eventId, streamId: entry.streamId })}\n\n`
+ )
+ .join('')
+ controller.enqueue(sseEncoder.encode(payload))
+ controller.close()
+ },
+ })
+}
+
+function asPayloadRecord(value: unknown): StreamPayload | undefined {
+ return value && typeof value === 'object' ? (value as StreamPayload) : undefined
+}
+
+function getPayloadData(event: MothershipStreamV1EventEnvelope): StreamPayload {
+ return asPayloadRecord(event.payload) ?? {}
+}
+
+function getToolUI(payload: StreamPayload): StreamToolUI | undefined {
+ const raw = asPayloadRecord(payload.ui)
+ if (!raw) {
+ return undefined
+ }
-function getPayloadData(payload: SSEPayload): SSEPayloadData | undefined {
- return typeof payload.data === 'object' ? payload.data : undefined
+ return {
+ ...(typeof raw.hidden === 'boolean' ? { hidden: raw.hidden } : {}),
+ ...(typeof raw.title === 'string' ? { title: raw.title } : {}),
+ ...(typeof raw.phaseLabel === 'string' ? { phaseLabel: raw.phaseLabel } : {}),
+ ...(typeof raw.clientExecutable === 'boolean'
+ ? { clientExecutable: raw.clientExecutable }
+ : {}),
+ }
}
/** Adds a workflow to the React Query cache with a top-insertion sort order if it doesn't already exist. */
@@ -418,14 +673,17 @@ export function useChat(
const [error, setError] = useState(null)
const [resolvedChatId, setResolvedChatId] = useState(initialChatId)
const [resources, setResources] = useState([])
- const [activeResourceId, setActiveResourceId] = useState(null)
- const initialActiveResourceIdRef = useRef(options?.initialActiveResourceId)
+ const [activeResourceId, setActiveResourceId] = useState(
+ options?.initialActiveResourceId ?? null
+ )
+ const [genericResourceData, setGenericResourceData] = useState(null)
const onResourceEventRef = useRef(options?.onResourceEvent)
onResourceEventRef.current = options?.onResourceEvent
const apiPathRef = useRef(options?.apiPath ?? MOTHERSHIP_CHAT_API_PATH)
apiPathRef.current = options?.apiPath ?? MOTHERSHIP_CHAT_API_PATH
const stopPathRef = useRef(options?.stopPath ?? '/api/mothership/chat/stop')
stopPathRef.current = options?.stopPath ?? '/api/mothership/chat/stop'
+ const pendingStopPromiseRef = useRef | null>(null)
const workflowIdRef = useRef(options?.workflowId)
workflowIdRef.current = options?.workflowId
const onToolResultRef = useRef(options?.onToolResult)
@@ -449,46 +707,184 @@ export function useChat(
const activeResourceIdRef = useRef(effectiveActiveResourceId)
activeResourceIdRef.current = effectiveActiveResourceId
- const [streamingFile, setStreamingFile] = useState<{
- fileName: string
- content: string
- } | null>(null)
- const streamingFileRef = useRef(streamingFile)
- streamingFileRef.current = streamingFile
-
- const [genericResourceData, setGenericResourceData] = useState({
- entries: [],
+ const {
+ previewSession,
+ previewSessionsById,
+ activePreviewSessionId,
+ hydratePreviewSessions,
+ upsertPreviewSession,
+ completePreviewSession,
+ removePreviewSession,
+ resetPreviewSessions,
+ } = useFilePreviewSessions()
+ const previewSessionRef = useRef(previewSession)
+ previewSessionRef.current = previewSession
+ const previewSessionsRef = useRef(previewSessionsById)
+ previewSessionsRef.current = previewSessionsById
+ const activePreviewSessionIdRef = useRef(activePreviewSessionId)
+ activePreviewSessionIdRef.current = activePreviewSessionId
+ const previewSessionsStateRef = useRef({
+ activeSessionId: activePreviewSessionId,
+ sessions: previewSessionsById,
})
- const genericResourceDataRef = useRef({ entries: [] })
+ previewSessionsStateRef.current = {
+ activeSessionId: activePreviewSessionId,
+ sessions: previewSessionsById,
+ }
+
+ const syncPreviewSessionRefs = useCallback((nextState: FilePreviewSessionsState) => {
+ previewSessionsStateRef.current = nextState
+ previewSessionsRef.current = nextState.sessions
+ activePreviewSessionIdRef.current = nextState.activeSessionId
+ previewSessionRef.current =
+ nextState.activeSessionId !== null
+ ? (nextState.sessions[nextState.activeSessionId] ?? null)
+ : null
+ }, [])
+
+ const applyPreviewSessionUpdate = useCallback(
+ (session: FilePreviewSession, options?: { activate?: boolean }) => {
+ const nextState = reduceFilePreviewSessions(previewSessionsStateRef.current, {
+ type: 'upsert',
+ session,
+ ...(options?.activate === false ? { activate: false } : {}),
+ })
+ syncPreviewSessionRefs(nextState)
+ upsertPreviewSession(session, options)
+ return nextState
+ },
+ [syncPreviewSessionRefs, upsertPreviewSession]
+ )
+
+ const applyCompletedPreviewSession = useCallback(
+ (session: FilePreviewSession) => {
+ const nextState = reduceFilePreviewSessions(previewSessionsStateRef.current, {
+ type: 'complete',
+ session,
+ })
+ syncPreviewSessionRefs(nextState)
+ completePreviewSession(session)
+ return nextState
+ },
+ [completePreviewSession, syncPreviewSessionRefs]
+ )
+
+ const reconcileTerminalPreviewSessions = useCallback(() => {
+ const completedAt = new Date().toISOString()
+ const completedSessions = buildCompletedPreviewSessions(
+ previewSessionsStateRef.current.sessions,
+ completedAt
+ )
+
+ for (const session of completedSessions) {
+ applyCompletedPreviewSession(session)
+ }
+ }, [applyCompletedPreviewSession])
+
+ const removePreviewSessionImmediate = useCallback(
+ (sessionId: string) => {
+ const nextState = reduceFilePreviewSessions(previewSessionsStateRef.current, {
+ type: 'remove',
+ sessionId,
+ })
+ syncPreviewSessionRefs(nextState)
+ removePreviewSession(sessionId)
+ return nextState
+ },
+ [removePreviewSession, syncPreviewSessionRefs]
+ )
const [messageQueue, setMessageQueue] = useState([])
const messageQueueRef = useRef([])
messageQueueRef.current = messageQueue
- const [pendingRecoveryMessage, setPendingRecoveryMessage] = useState(null)
- const pendingRecoveryMessageRef = useRef(null)
- pendingRecoveryMessageRef.current = pendingRecoveryMessage
+ const manualQueueSendIdRef = useRef(null)
const sendMessageRef = useRef(async () => {})
const processSSEStreamRef = useRef<
(
reader: ReadableStreamDefaultReader,
assistantId: string,
- options?: StreamProcessingOptions
- ) => Promise
- >(async () => ({
- sawStreamError: false,
- sawDoneEvent: false,
- lastEventId: 0,
- }))
- const finalizeRef = useRef<(options?: { error?: boolean }) => void>(() => {})
- const retryReconnectRef = useRef<
+ expectedGen?: number,
+ options?: { preserveExistingState?: boolean }
+ ) => Promise<{ sawStreamError: boolean; sawComplete: boolean }>
+ >(async () => ({ sawStreamError: false, sawComplete: false }))
+ const attachToExistingStreamRef = useRef<
(opts: {
streamId: string
assistantId: string
- gen: number
- initialSnapshot?: StreamSnapshot | null
- }) => Promise
+ expectedGen: number
+ initialBatch?: StreamBatchResponse | null
+ afterCursor?: string
+ }) => Promise<{ error: boolean; aborted: boolean }>
+ >(async () => ({ error: false, aborted: true }))
+ const retryReconnectRef = useRef<
+ (opts: { streamId: string; assistantId: string; gen: number }) => Promise
>(async () => false)
+ const finalizeRef = useRef<(options?: { error?: boolean }) => void>(() => {})
+
+ const resetEphemeralPreviewState = useCallback(
+ (options?: { removeStreamingResource?: boolean }) => {
+ syncPreviewSessionRefs(INITIAL_FILE_PREVIEW_SESSIONS_STATE)
+ resetPreviewSessions()
+ if (options?.removeStreamingResource) {
+ setResources((current) => current.filter((resource) => resource.id !== 'streaming-file'))
+ }
+ },
+ [resetPreviewSessions, syncPreviewSessionRefs]
+ )
+
+ const syncPreviewResourceChrome = useCallback((session: FilePreviewSession) => {
+ if (session.targetKind === 'new_file') {
+ setResources((current) => {
+ const existing = current.find((resource) => resource.id === 'streaming-file')
+ if (existing) {
+ return current.map((resource) =>
+ resource.id === 'streaming-file'
+ ? { ...resource, title: session.fileName || 'Writing file...' }
+ : resource
+ )
+ }
+ return [
+ ...current,
+ {
+ type: 'file',
+ id: 'streaming-file',
+ title: session.fileName || 'Writing file...',
+ },
+ ]
+ })
+ setActiveResourceId('streaming-file')
+ return
+ }
+
+ if (session.fileId) {
+ setResources((current) => current.filter((resource) => resource.id !== 'streaming-file'))
+ setActiveResourceId(session.fileId)
+ }
+ }, [])
+
+ const seedPreviewSessions = useCallback(
+ (sessions: FilePreviewSession[]) => {
+ if (sessions.length === 0) {
+ return
+ }
+
+ const nextState = reduceFilePreviewSessions(previewSessionsStateRef.current, {
+ type: 'hydrate',
+ sessions,
+ })
+ syncPreviewSessionRefs(nextState)
+ hydratePreviewSessions(sessions)
+ const active =
+ nextState.activeSessionId !== null
+ ? (nextState.sessions[nextState.activeSessionId] ?? null)
+ : null
+ if (active) {
+ syncPreviewResourceChrome(active)
+ }
+ },
+ [hydratePreviewSessions, syncPreviewResourceChrome, syncPreviewSessionRefs]
+ )
const abortControllerRef = useRef(null)
const streamReaderRef = useRef | null>(null)
@@ -496,10 +892,11 @@ export function useChat(
/** Panel/task selection — drives createNewChat + request chatId; may differ from chatIdRef while a stream is still finishing. */
const selectedChatIdRef = useRef(initialChatId)
selectedChatIdRef.current = initialChatId
- const appliedChatIdRef = useRef(undefined)
+ const appliedChatHistoryKeyRef = useRef(undefined)
const pendingUserMsgRef = useRef<{ id: string; content: string } | null>(null)
const streamIdRef = useRef(undefined)
- const lastEventIdRef = useRef(0)
+ const locallyTerminalStreamIdRef = useRef(undefined)
+ const lastCursorRef = useRef('0')
const sendingRef = useRef(false)
const streamGenRef = useRef(0)
const streamingContentRef = useRef('')
@@ -522,7 +919,7 @@ export function useChat(
})
setActiveResourceId(resource.id)
- if (isEphemeralResource(resource)) {
+ if (resource.id === 'streaming-file') {
return true
}
@@ -547,6 +944,56 @@ export function useChat(
setResources(newOrder)
}, [])
+ const startClientWorkflowTool = useCallback(
+ (toolCallId: string, toolName: string, toolArgs: Record) => {
+ if (!isWorkflowToolName(toolName)) {
+ return
+ }
+ if (clientExecutionStartedRef.current.has(toolCallId) && isRunToolActiveForId(toolCallId)) {
+ return
+ }
+ clientExecutionStartedRef.current.add(toolCallId)
+
+ const targetWorkflowId =
+ typeof toolArgs.workflowId === 'string'
+ ? toolArgs.workflowId
+ : useWorkflowRegistry.getState().activeWorkflowId
+ if (targetWorkflowId) {
+ const meta = getWorkflowById(workspaceId, targetWorkflowId)
+ const wasAdded = addResource({
+ type: 'workflow',
+ id: targetWorkflowId,
+ title: meta?.name ?? 'Workflow',
+ })
+ if (!wasAdded && activeResourceIdRef.current !== targetWorkflowId) {
+ setActiveResourceId(targetWorkflowId)
+ }
+ onResourceEventRef.current?.()
+ }
+
+ executeRunToolOnClient(toolCallId, toolName, toolArgs)
+ },
+ [addResource, workspaceId]
+ )
+
+ const recoverPendingClientWorkflowTools = useCallback(
+ (nextMessages: ChatMessage[]) => {
+ for (const message of nextMessages) {
+ for (const block of message.contentBlocks ?? []) {
+ const toolCall = block.toolCall
+ if (!toolCall || !isWorkflowToolName(toolCall.name)) {
+ continue
+ }
+ if (toolCall.status !== 'executing') {
+ continue
+ }
+ startClientWorkflowTool(toolCall.id, toolCall.name, toolCall.params ?? {})
+ }
+ }
+ },
+ [startClientWorkflowTool]
+ )
+
useEffect(() => {
if (sendingRef.current) {
const streamOwnerId = chatIdRef.current
@@ -562,10 +1009,6 @@ export function useChat(
abortControllerRef.current = null
sendingRef.current = false
setIsSending(false)
- setIsReconnecting(false)
- lastEventIdRef.current = 0
- pendingRecoveryMessageRef.current = null
- setPendingRecoveryMessage(null)
if (abandonedChatId) {
queryClient.invalidateQueries({ queryKey: taskKeys.detail(abandonedChatId) })
}
@@ -576,32 +1019,29 @@ export function useChat(
}
}
chatIdRef.current = initialChatId
+ lastCursorRef.current = '0'
+ locallyTerminalStreamIdRef.current = undefined
setResolvedChatId(initialChatId)
- appliedChatIdRef.current = undefined
+ appliedChatHistoryKeyRef.current = undefined
setMessages([])
setError(null)
setIsSending(false)
setIsReconnecting(false)
setResources([])
setActiveResourceId(null)
- setStreamingFile(null)
- streamingFileRef.current = null
- genericResourceDataRef.current = { entries: [] }
- setGenericResourceData({ entries: [] })
+ resetEphemeralPreviewState()
setMessageQueue([])
- lastEventIdRef.current = 0
- clientExecutionStartedRef.current.clear()
- pendingRecoveryMessageRef.current = null
- setPendingRecoveryMessage(null)
- }, [initialChatId, queryClient])
+ }, [initialChatId, queryClient, resetEphemeralPreviewState])
useEffect(() => {
if (workflowIdRef.current) return
if (!isHomePage || !chatIdRef.current) return
streamGenRef.current++
chatIdRef.current = undefined
+ lastCursorRef.current = '0'
+ locallyTerminalStreamIdRef.current = undefined
setResolvedChatId(undefined)
- appliedChatIdRef.current = undefined
+ appliedChatHistoryKeyRef.current = undefined
abortControllerRef.current = null
sendingRef.current = false
setMessages([])
@@ -610,315 +1050,135 @@ export function useChat(
setIsReconnecting(false)
setResources([])
setActiveResourceId(null)
- setStreamingFile(null)
- streamingFileRef.current = null
- genericResourceDataRef.current = { entries: [] }
- setGenericResourceData({ entries: [] })
+ resetEphemeralPreviewState()
setMessageQueue([])
- lastEventIdRef.current = 0
- clientExecutionStartedRef.current.clear()
- pendingRecoveryMessageRef.current = null
- setPendingRecoveryMessage(null)
- }, [isHomePage])
-
- const fetchStreamBatch = useCallback(
- async (
- streamId: string,
- fromEventId: number,
- signal?: AbortSignal
- ): Promise => {
- const response = await fetch(
- `${COPILOT_CHAT_STREAM_API_PATH}?streamId=${encodeURIComponent(streamId)}&from=${fromEventId}&batch=true`,
- { signal }
- )
+ }, [isHomePage, resetEphemeralPreviewState])
- if (!response.ok) {
- throw new Error(`Stream resume batch failed: ${response.status}`)
- }
+ useEffect(() => {
+ if (!chatHistory) return
- return response.json()
- },
- []
- )
+ const hydrationKey = buildChatHistoryHydrationKey(chatHistory)
+ if (appliedChatHistoryKeyRef.current === hydrationKey) return
- const attachToExistingStream = useCallback(
- async ({
- streamId,
- assistantId,
- expectedGen,
- snapshot,
- initialLastEventId = 0,
- }: AttachToStreamOptions): Promise => {
- let latestEventId = initialLastEventId
- let seedEvents = snapshot?.events ?? []
- let streamStatus = snapshot?.status ?? 'unknown'
- let attachAttempt = 0
+ const activeStreamId = chatHistory.activeStreamId
+ appliedChatHistoryKeyRef.current = hydrationKey
+ const mappedMessages = chatHistory.messages.map(toDisplayMessage)
+ const snapshotEvents = Array.isArray(chatHistory.streamSnapshot?.events)
+ ? chatHistory.streamSnapshot.events
+ : []
+ const snapshotHasCompleteEvent = snapshotEvents.some(
+ (entry) => entry?.event?.type === MothershipStreamV1EventType.complete
+ )
+ const shouldReconnectActiveStream =
+ Boolean(activeStreamId) &&
+ !sendingRef.current &&
+ activeStreamId !== locallyTerminalStreamIdRef.current &&
+ !isTerminalStreamStatus(chatHistory.streamSnapshot?.status) &&
+ !snapshotHasCompleteEvent
+
+ if (!activeStreamId && locallyTerminalStreamIdRef.current) {
+ locallyTerminalStreamIdRef.current = undefined
+ }
+ const shouldPreserveActiveStreamingMessage =
+ sendingRef.current && Boolean(activeStreamId) && activeStreamId === streamIdRef.current
+
+ if (shouldPreserveActiveStreamingMessage) {
+ setMessages((prev) => {
+ const localStreamingAssistant = prev[prev.length - 1]
+ if (localStreamingAssistant?.role !== 'assistant') {
+ return mappedMessages
+ }
- setIsSending(true)
- setIsReconnecting(true)
- setError(null)
+ const nextMessages =
+ mappedMessages[mappedMessages.length - 1]?.role === 'assistant'
+ ? mappedMessages.slice(0, -1)
+ : mappedMessages
- logger.info('Attaching to existing stream', {
- streamId,
- expectedGen,
- initialLastEventId,
- seedEventCount: seedEvents.length,
- streamStatus,
+ return [...nextMessages, localStreamingAssistant]
})
+ } else {
+ setMessages(mappedMessages)
+ }
- try {
- while (streamGenRef.current === expectedGen) {
- if (seedEvents.length > 0) {
- const replayResult = await processSSEStreamRef.current(
- buildReplayStream(seedEvents).getReader(),
- assistantId,
- {
- expectedGen,
- initialLastEventId: latestEventId,
- preserveExistingState: true,
- }
- )
- latestEventId = Math.max(
- replayResult.lastEventId,
- seedEvents[seedEvents.length - 1]?.eventId ?? latestEventId
- )
- lastEventIdRef.current = latestEventId
- seedEvents = []
+ recoverPendingClientWorkflowTools(mappedMessages)
- if (replayResult.sawStreamError) {
- logger.warn('Replay stream ended with error event', { streamId, latestEventId })
- return { aborted: false, error: true }
- }
- }
+ if (chatHistory.resources.some((r) => r.id === 'streaming-file')) {
+ fetch('/api/copilot/chat/resources', {
+ method: 'DELETE',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ chatId: chatHistory.id,
+ resourceType: 'file',
+ resourceId: 'streaming-file',
+ }),
+ }).catch(() => {})
+ }
- if (isTerminalStreamStatus(streamStatus)) {
- logger.info('Existing stream already reached terminal status', {
- streamId,
- latestEventId,
- streamStatus,
- })
- if (streamStatus === 'error') {
- setError(RECONNECT_TAIL_ERROR)
- }
- return { aborted: false, error: streamStatus === 'error' }
- }
+ const persistedResources = chatHistory.resources.filter((r) => r.id !== 'streaming-file')
+ if (persistedResources.length > 0) {
+ setResources(persistedResources)
+ setActiveResourceId(persistedResources[persistedResources.length - 1].id)
- const activeAbortController = abortControllerRef.current
- if (!activeAbortController) {
- return { aborted: true, error: false }
- }
+ for (const resource of persistedResources) {
+ if (resource.type !== 'workflow') continue
+ ensureWorkflowInRegistry(resource.id, resource.title, workspaceId)
+ }
+ } else if (chatHistory.resources.some((r) => r.id === 'streaming-file')) {
+ setResources([])
+ setActiveResourceId(null)
+ }
- logger.info('Opening live stream tail', {
- streamId,
- fromEventId: latestEventId,
- attempt: attachAttempt,
- })
-
- const sseRes = await fetch(
- `${COPILOT_CHAT_STREAM_API_PATH}?streamId=${encodeURIComponent(streamId)}&from=${latestEventId}`,
- { signal: activeAbortController.signal }
- )
- if (!sseRes.ok || !sseRes.body) {
- throw new Error(RECONNECT_TAIL_ERROR)
- }
-
- setIsReconnecting(false)
-
- const liveResult = await processSSEStreamRef.current(
- sseRes.body.getReader(),
- assistantId,
- {
- expectedGen,
- initialLastEventId: latestEventId,
- preserveExistingState: true,
- }
- )
- latestEventId = Math.max(latestEventId, liveResult.lastEventId)
- lastEventIdRef.current = latestEventId
-
- if (liveResult.sawStreamError) {
- logger.warn('Live stream tail ended with error event', { streamId, latestEventId })
- return { aborted: false, error: true }
- }
-
- attachAttempt += 1
- setIsReconnecting(true)
-
- logger.warn('Live stream ended without terminal event, fetching replay batch', {
- streamId,
- latestEventId,
- attempt: attachAttempt,
- })
-
- const batch = await fetchStreamBatch(
- streamId,
- latestEventId,
- activeAbortController.signal
- )
- seedEvents = batch.events
- streamStatus = batch.status
-
- if (batch.events.length > 0) {
- latestEventId = batch.events[batch.events.length - 1].eventId
- lastEventIdRef.current = latestEventId
- }
-
- logger.info('Fetched replay batch after non-terminal stream close', {
- streamId,
- latestEventId,
- streamStatus,
- eventCount: batch.events.length,
- attempt: attachAttempt,
- })
-
- if (batch.events.length === 0 && !isTerminalStreamStatus(batch.status)) {
- logger.info('No new replay events yet; reopening active stream tail', {
- streamId,
- latestEventId,
- streamStatus,
- attempt: attachAttempt,
- })
- if (activeAbortController.signal.aborted || streamGenRef.current !== expectedGen) {
- return { aborted: true, error: false }
- }
- }
- }
-
- return { aborted: true, error: false }
- } catch (err) {
- if (err instanceof Error && err.name === 'AbortError') {
- return { aborted: true, error: false }
- }
-
- logger.error('Failed to attach to existing stream, will throw for outer retry', {
- streamId,
- latestEventId,
- error: err instanceof Error ? err.message : String(err),
- })
- throw err
- } finally {
- setIsReconnecting(false)
- }
- },
- [fetchStreamBatch]
- )
-
- const applyChatHistorySnapshot = useCallback(
- (history: TaskChatHistory, options?: { preserveActiveStreamingMessage?: boolean }) => {
- const preserveActiveStreamingMessage = options?.preserveActiveStreamingMessage ?? false
- const activeStreamId = history.activeStreamId
- appliedChatIdRef.current = history.id
-
- const mappedMessages = history.messages.map(mapStoredMessage)
- const shouldPreserveActiveStreamingMessage =
- preserveActiveStreamingMessage &&
- sendingRef.current &&
- Boolean(activeStreamId) &&
- activeStreamId === streamIdRef.current
-
- if (shouldPreserveActiveStreamingMessage) {
- setMessages((prev) => {
- const localStreamingAssistant = prev[prev.length - 1]
- if (localStreamingAssistant?.role !== 'assistant') {
- return mappedMessages
- }
-
- const nextMessages =
- mappedMessages[mappedMessages.length - 1]?.role === 'assistant'
- ? mappedMessages.slice(0, -1)
- : mappedMessages
-
- return [...nextMessages, localStreamingAssistant]
- })
- } else {
- setMessages(mappedMessages)
- }
-
- if (history.resources.some((r) => r.id === 'streaming-file')) {
- fetch('/api/copilot/chat/resources', {
- method: 'DELETE',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({
- chatId: history.id,
- resourceType: 'file',
- resourceId: 'streaming-file',
- }),
- }).catch(() => {})
- }
-
- const persistedResources = history.resources.filter((r) => r.id !== 'streaming-file')
- if (persistedResources.length > 0) {
- setResources(persistedResources)
- const initialId = initialActiveResourceIdRef.current
- const restoredId =
- initialId && persistedResources.some((r) => r.id === initialId)
- ? initialId
- : persistedResources[persistedResources.length - 1].id
- setActiveResourceId(restoredId)
-
- for (const resource of persistedResources) {
- if (resource.type !== 'workflow') continue
- ensureWorkflowInRegistry(resource.id, resource.title, workspaceId)
- }
- } else if (history.resources.some((r) => r.id === 'streaming-file')) {
- setResources([])
- setActiveResourceId(null)
- }
- },
- [workspaceId]
- )
-
- const preparePendingStreamRecovery = useCallback(
- async (chatId: string): Promise => {
- const latestHistory = await fetchChatHistory(chatId)
- queryClient.setQueryData(taskKeys.detail(chatId), latestHistory)
- applyChatHistorySnapshot(latestHistory)
-
- if (!latestHistory.activeStreamId) {
- return null
- }
-
- return {
- streamId: latestHistory.activeStreamId,
- snapshot: latestHistory.streamSnapshot,
- }
- },
- [applyChatHistorySnapshot, queryClient]
- )
-
- useEffect(() => {
- if (!chatHistory) return
-
- const activeStreamId = chatHistory.activeStreamId
- const snapshot = chatHistory.streamSnapshot
- const isNewChat = appliedChatIdRef.current !== chatHistory.id
-
- if (isNewChat) {
- applyChatHistorySnapshot(chatHistory, { preserveActiveStreamingMessage: true })
- } else if (!activeStreamId || sendingRef.current) {
- return
+ const snapshotPreviewSessions = Array.isArray(chatHistory.streamSnapshot?.previewSessions)
+ ? (chatHistory.streamSnapshot.previewSessions as FilePreviewSession[])
+ : []
+ if (snapshotPreviewSessions.length > 0) {
+ seedPreviewSessions(snapshotPreviewSessions)
}
- if (activeStreamId && !sendingRef.current) {
+ if (shouldReconnectActiveStream && activeStreamId) {
const gen = ++streamGenRef.current
const abortController = new AbortController()
abortControllerRef.current = abortController
streamIdRef.current = activeStreamId
- lastEventIdRef.current = snapshot?.events?.[snapshot.events.length - 1]?.eventId ?? 0
+ lastCursorRef.current = '0'
sendingRef.current = true
- streamingContentRef.current = ''
- streamingBlocksRef.current = []
- clientExecutionStartedRef.current = extractToolCallIdsFromSnapshot(snapshot)
const assistantId = generateId()
const reconnect = async () => {
- const succeeded = await retryReconnectRef.current({
- streamId: activeStreamId,
- assistantId,
- gen,
- initialSnapshot: snapshot,
- })
+ const initialSnapshot = chatHistory.streamSnapshot
+ const snapshotEvents = Array.isArray(initialSnapshot?.events)
+ ? (initialSnapshot.events as StreamBatchEvent[])
+ : []
+
+ const reconnectResult =
+ snapshotEvents.length > 0
+ ? await attachToExistingStreamRef.current({
+ streamId: activeStreamId,
+ assistantId,
+ expectedGen: gen,
+ initialBatch: {
+ success: true,
+ events: snapshotEvents,
+ previewSessions: snapshotPreviewSessions,
+ status: initialSnapshot?.status ?? 'unknown',
+ },
+ afterCursor: String(snapshotEvents[snapshotEvents.length - 1]?.eventId ?? '0'),
+ })
+ : null
+
+ const succeeded =
+ reconnectResult !== null
+ ? !reconnectResult.error || reconnectResult.aborted
+ : await retryReconnectRef.current({
+ streamId: activeStreamId,
+ assistantId,
+ gen,
+ })
+ if (succeeded && streamGenRef.current === gen && sendingRef.current) {
+ finalizeRef.current()
+ return
+ }
if (!succeeded && streamGenRef.current === gen) {
try {
finalizeRef.current({ error: true })
@@ -933,51 +1193,65 @@ export function useChat(
}
reconnect()
}
- }, [applyChatHistorySnapshot, chatHistory, queryClient])
+ }, [
+ chatHistory,
+ workspaceId,
+ queryClient,
+ recoverPendingClientWorkflowTools,
+ seedPreviewSessions,
+ ])
const processSSEStream = useCallback(
async (
reader: ReadableStreamDefaultReader,
assistantId: string,
- options?: StreamProcessingOptions
+ expectedGen?: number,
+ options?: { preserveExistingState?: boolean }
) => {
- const { expectedGen, initialLastEventId = 0, preserveExistingState = false } = options ?? {}
const decoder = new TextDecoder()
streamReaderRef.current = reader
let buffer = ''
- const blocks: ContentBlock[] = preserveExistingState ? [...streamingBlocksRef.current] : []
+
+ const preserveState = options?.preserveExistingState === true
+ const blocks: ContentBlock[] = preserveState ? [...streamingBlocksRef.current] : []
const toolMap = new Map()
const toolArgsMap = new Map>()
- // Maps toolCallId → index in genericResourceDataRef.current.entries for fast lookup
- const genericEntryMap = new Map()
- if (preserveExistingState) {
- for (const [idx, entry] of genericResourceDataRef.current.entries.entries()) {
- genericEntryMap.set(entry.toolCallId, idx)
+
+ if (preserveState) {
+ for (let i = 0; i < blocks.length; i++) {
+ const tc = blocks[i].toolCall
+ if (tc) {
+ toolMap.set(tc.id, i)
+ if (tc.params) toolArgsMap.set(tc.id, tc.params)
+ }
}
}
- const clientExecutionStarted = clientExecutionStartedRef.current
+
let activeSubagent: string | undefined
+ let activeSubagentParentToolCallId: string | undefined
let activeCompactionId: string | undefined
- let runningText = preserveExistingState ? streamingContentRef.current : ''
+
+ if (preserveState) {
+ for (let i = blocks.length - 1; i >= 0; i--) {
+ if (blocks[i].type === 'subagent' && blocks[i].content) {
+ activeSubagent = blocks[i].content
+ break
+ }
+ if (blocks[i].type === 'subagent_end') {
+ break
+ }
+ }
+ }
+
+ let runningText = preserveState ? streamingContentRef.current || '' : ''
let lastContentSource: 'main' | 'subagent' | null = null
let streamRequestId: string | undefined
- let lastEventId = initialLastEventId
- let sawDoneEvent = false
- if (!preserveExistingState) {
+ if (!preserveState) {
streamingContentRef.current = ''
streamingBlocksRef.current = []
}
- for (const [index, block] of blocks.entries()) {
- if (block.type === 'tool_call' && block.toolCall?.id) {
- toolMap.set(block.toolCall.id, index)
- if (block.toolCall.params) {
- toolArgsMap.set(block.toolCall.id, block.toolCall.params)
- }
- }
- }
-
const ensureTextBlock = (): ContentBlock => {
const last = blocks[blocks.length - 1]
if (last?.type === 'text' && last.subagent === activeSubagent) return last
@@ -997,14 +1271,15 @@ export function useChat(
flush()
}
- const buildInlineErrorTag = (payload: SSEPayload) => {
- const data = getPayloadData(payload) as Record | undefined
+ const buildInlineErrorTag = (event: MothershipStreamV1EventEnvelope) => {
+ const data = getPayloadData(event)
const message =
- (data?.displayMessage as string | undefined) ||
- payload.error ||
+ (typeof data.displayMessage === 'string' ? data.displayMessage : undefined) ||
+ (typeof data.message === 'string' ? data.message : undefined) ||
+ (typeof data.error === 'string' ? data.error : undefined) ||
'An unexpected error occurred'
- const provider = (data?.provider as string | undefined) || undefined
- const code = (data?.code as string | undefined) || undefined
+ const provider = typeof data.provider === 'string' ? data.provider : undefined
+ const code = typeof data.code === 'string' ? data.code : undefined
return `${JSON.stringify({
message,
...(code ? { code } : {}),
@@ -1014,6 +1289,7 @@ export function useChat(
const isStale = () => expectedGen !== undefined && streamGenRef.current !== expectedGen
let sawStreamError = false
+ let sawCompleteEvent = false
const flush = () => {
if (isStale()) return
@@ -1036,504 +1312,575 @@ export function useChat(
})
}
- const appendGenericEntry = (entry: GenericResourceEntry): number => {
- const entries = [...genericResourceDataRef.current.entries, entry]
- genericResourceDataRef.current.entries = entries
- setGenericResourceData({ entries })
- return entries.length - 1
- }
-
- const updateGenericEntry = (
- entryIdx: number,
- changes: Partial
- ): void => {
- const entries = genericResourceDataRef.current.entries.slice()
- entries[entryIdx] = { ...entries[entryIdx], ...changes }
- genericResourceDataRef.current.entries = entries
- setGenericResourceData({ entries })
- }
-
try {
- while (true) {
- const { done, value } = await reader.read()
- if (done) break
- if (isStale()) continue
-
- buffer += decoder.decode(value, { stream: true })
- const lines = buffer.split('\n')
- buffer = lines.pop() || ''
-
- for (const line of lines) {
- if (isStale()) break
- if (!line.startsWith('data: ')) continue
- const raw = line.slice(6)
-
- let parsed: SSEPayload
- try {
- parsed = JSON.parse(raw)
- } catch {
+ const pendingLines: string[] = []
+
+ readLoop: while (true) {
+ if (pendingLines.length === 0) {
+ const { done, value } = await reader.read()
+ if (done) break
+ if (isStale()) continue
+
+ buffer += decoder.decode(value, { stream: true })
+ const lines = buffer.split('\n')
+ buffer = lines.pop() || ''
+ pendingLines.push(...lines)
+ if (pendingLines.length === 0) {
continue
}
+ }
- if (typeof (parsed as SSEPayload & { eventId?: unknown }).eventId === 'number') {
- lastEventId = Math.max(
- lastEventId,
- (parsed as SSEPayload & { eventId: number }).eventId
- )
- lastEventIdRef.current = lastEventId
- }
+ const line = pendingLines.shift()
+ if (line === undefined) {
+ continue
+ }
+ if (isStale()) {
+ pendingLines.length = 0
+ continue
+ }
+ if (!line.startsWith('data: ')) continue
+ const raw = line.slice(6)
- logger.debug('SSE event received', parsed)
- switch (parsed.type) {
- case 'chat_id': {
- if (parsed.chatId) {
- const isNewChat = !chatIdRef.current
- chatIdRef.current = parsed.chatId
- const selected = selectedChatIdRef.current
- if (selected == null) {
- if (isNewChat) {
- setResolvedChatId(parsed.chatId)
- }
- } else if (parsed.chatId === selected) {
- setResolvedChatId(parsed.chatId)
- }
- queryClient.invalidateQueries({
- queryKey: taskKeys.list(workspaceId),
- })
+ let parsed: MothershipStreamV1EventEnvelope
+ try {
+ parsed = JSON.parse(raw)
+ } catch {
+ continue
+ }
+
+ if (parsed.trace?.requestId && parsed.trace.requestId !== streamRequestId) {
+ streamRequestId = parsed.trace.requestId
+ flush()
+ }
+ if (parsed.stream?.streamId) {
+ streamIdRef.current = parsed.stream.streamId
+ }
+ if (parsed.stream?.cursor) {
+ lastCursorRef.current = parsed.stream.cursor
+ } else if (typeof parsed.seq === 'number') {
+ lastCursorRef.current = String(parsed.seq)
+ }
+
+ logger.debug('SSE event received', parsed)
+ switch (parsed.type) {
+ case MothershipStreamV1EventType.session: {
+ const payload = getPayloadData(parsed)
+ const kind = typeof payload.kind === 'string' ? payload.kind : ''
+ const payloadChatId =
+ typeof payload.chatId === 'string'
+ ? payload.chatId
+ : typeof parsed.stream?.chatId === 'string'
+ ? parsed.stream.chatId
+ : undefined
+ if (kind === MothershipStreamV1SessionKind.chat && payloadChatId) {
+ const isNewChat = !chatIdRef.current
+ chatIdRef.current = payloadChatId
+ const selected = selectedChatIdRef.current
+ if (selected == null) {
if (isNewChat) {
- const userMsg = pendingUserMsgRef.current
- const activeStreamId = streamIdRef.current
- if (userMsg && activeStreamId) {
- queryClient.setQueryData(taskKeys.detail(parsed.chatId), {
- id: parsed.chatId,
- title: null,
- messages: [
- {
- id: userMsg.id,
- role: 'user',
- content: userMsg.content,
- },
- ],
- activeStreamId,
- resources: [],
- })
- }
- if (!workflowIdRef.current) {
- window.history.replaceState(
- null,
- '',
- `/workspace/${workspaceId}/task/${parsed.chatId}`
- )
- }
+ setResolvedChatId(payloadChatId)
}
+ } else if (payloadChatId === selected) {
+ setResolvedChatId(payloadChatId)
}
- break
- }
- case 'request_id': {
- const rid = typeof parsed.data === 'string' ? parsed.data : undefined
- if (rid) {
- streamRequestId = rid
- flush()
+ queryClient.invalidateQueries({
+ queryKey: taskKeys.list(workspaceId),
+ })
+ if (isNewChat) {
+ const userMsg = pendingUserMsgRef.current
+ const activeStreamId = streamIdRef.current
+ if (userMsg && activeStreamId) {
+ queryClient.setQueryData(taskKeys.detail(payloadChatId), {
+ id: payloadChatId,
+ title: null,
+ messages: [
+ {
+ id: userMsg.id,
+ role: 'user',
+ content: userMsg.content,
+ timestamp: new Date().toISOString(),
+ },
+ ],
+ activeStreamId,
+ resources: [],
+ })
+ }
+ if (!workflowIdRef.current) {
+ window.history.replaceState(
+ null,
+ '',
+ `/workspace/${workspaceId}/task/${payloadChatId}`
+ )
+ }
}
- break
}
- case 'content': {
- const chunk = typeof parsed.data === 'string' ? parsed.data : (parsed.content ?? '')
- if (chunk) {
- const contentSource: 'main' | 'subagent' = activeSubagent ? 'subagent' : 'main'
- const needsBoundaryNewline =
- lastContentSource !== null &&
- lastContentSource !== contentSource &&
- runningText.length > 0 &&
- !runningText.endsWith('\n')
- const tb = ensureTextBlock()
- const normalizedChunk = needsBoundaryNewline ? `\n${chunk}` : chunk
- tb.content = (tb.content ?? '') + normalizedChunk
- if (activeSubagent) tb.subagent = activeSubagent
- runningText += normalizedChunk
- lastContentSource = contentSource
- streamingContentRef.current = runningText
- flush()
- }
- break
+ if (kind === MothershipStreamV1SessionKind.title) {
+ queryClient.invalidateQueries({
+ queryKey: taskKeys.list(workspaceId),
+ })
+ onTitleUpdateRef.current?.()
}
- case 'reasoning': {
- const d = (
- parsed.data && typeof parsed.data === 'object' ? parsed.data : {}
- ) as Record
- const phase = d.phase as string | undefined
- if (phase === 'start') {
- const tb = ensureTextBlock()
- tb.content = `${tb.content ?? ''}`
- runningText += ''
- streamingContentRef.current = runningText
- flush()
- } else if (phase === 'end') {
- const tb = ensureTextBlock()
- tb.content = `${tb.content ?? ''}`
- runningText += ''
- streamingContentRef.current = runningText
- flush()
- } else {
- const chunk =
- typeof d.data === 'string' ? d.data : (parsed.content as string | undefined)
- if (chunk) {
- const tb = ensureTextBlock()
- tb.content = (tb.content ?? '') + chunk
- runningText += chunk
- streamingContentRef.current = runningText
- flush()
- }
- }
- break
+ break
+ }
+ case MothershipStreamV1EventType.text: {
+ const payload = getPayloadData(parsed)
+ const chunk = typeof payload.text === 'string' ? payload.text : ''
+ if (chunk) {
+ const contentSource: 'main' | 'subagent' = activeSubagent ? 'subagent' : 'main'
+ const needsBoundaryNewline =
+ lastContentSource !== null &&
+ lastContentSource !== contentSource &&
+ runningText.length > 0 &&
+ !runningText.endsWith('\n')
+ const tb = ensureTextBlock()
+ const normalizedChunk = needsBoundaryNewline ? `\n${chunk}` : chunk
+ tb.content = (tb.content ?? '') + normalizedChunk
+ if (activeSubagent) tb.subagent = activeSubagent
+ runningText += normalizedChunk
+ lastContentSource = contentSource
+ streamingContentRef.current = runningText
+ flush()
}
- case 'tool_generating':
- case 'tool_call': {
- const id = parsed.toolCallId
- const data = getPayloadData(parsed)
- const name = parsed.toolName || data?.name || 'unknown'
- const isPartial = data?.partial === true
- if (!id) break
-
- if (name === 'tool_search_tool_regex') {
- break
+ break
+ }
+ case MothershipStreamV1EventType.tool: {
+ const payload = getPayloadData(parsed)
+ const previewPhase =
+ typeof payload.previewPhase === 'string' ? payload.previewPhase : undefined
+ const phase =
+ typeof payload.phase === 'string' ? payload.phase : MothershipStreamV1ToolPhase.call
+ const id =
+ typeof payload.toolCallId === 'string'
+ ? payload.toolCallId
+ : typeof payload.id === 'string'
+ ? payload.id
+ : undefined
+ if (!id) break
+
+ if (previewPhase) {
+ const prevSession = previewSessionsRef.current[id]
+ const target = asPayloadRecord(payload.target)
+ const targetKind =
+ payload.targetKind === 'new_file' || payload.targetKind === 'file_id'
+ ? (payload.targetKind as 'new_file' | 'file_id')
+ : target?.kind === 'new_file' || target?.kind === 'file_id'
+ ? (target.kind as 'new_file' | 'file_id')
+ : prevSession?.targetKind
+ const fileId =
+ typeof payload.fileId === 'string'
+ ? payload.fileId
+ : typeof target?.fileId === 'string'
+ ? target.fileId
+ : prevSession?.fileId
+ const fileName =
+ typeof payload.fileName === 'string'
+ ? payload.fileName
+ : typeof target?.fileName === 'string'
+ ? target.fileName
+ : (prevSession?.fileName ?? '')
+ const operation =
+ typeof payload.operation === 'string' ? payload.operation : prevSession?.operation
+ const edit = asPayloadRecord(payload.edit) ?? prevSession?.edit
+ const streamId = parsed.stream?.streamId ?? prevSession?.streamId ?? ''
+ const nextPreviewVersion =
+ typeof payload.previewVersion === 'number' &&
+ Number.isFinite(payload.previewVersion)
+ ? payload.previewVersion
+ : (prevSession?.previewVersion ?? 0) + 1
+ const baseSession: FilePreviewSession = {
+ schemaVersion: 1,
+ id,
+ streamId,
+ toolCallId: id,
+ status: prevSession?.status ?? 'pending',
+ fileName,
+ ...(fileId ? { fileId } : {}),
+ ...(targetKind ? { targetKind } : {}),
+ ...(operation ? { operation } : {}),
+ ...(edit ? { edit } : {}),
+ previewText: prevSession?.previewText ?? '',
+ previewVersion: prevSession?.previewVersion ?? 0,
+ updatedAt: prevSession?.updatedAt ?? new Date().toISOString(),
+ ...(prevSession?.completedAt ? { completedAt: prevSession.completedAt } : {}),
}
- const ui = parsed.ui || data?.ui
- if (ui?.hidden) break
- const displayTitle = ui?.title || ui?.phaseLabel
- const phaseLabel = ui?.phaseLabel
- const args = (data?.arguments ?? data?.input) as Record | undefined
- if (!toolMap.has(id)) {
- toolMap.set(id, blocks.length)
- blocks.push({
- type: 'tool_call',
- toolCall: {
- id,
- name,
- status: 'executing',
- displayTitle,
- phaseLabel,
- params: args,
- calledBy: activeSubagent,
- },
- })
- if (name === 'read' || isResourceToolName(name)) {
- if (args) toolArgsMap.set(id, args)
+
+ if (previewPhase === 'file_preview_start') {
+ const nextSession: FilePreviewSession = {
+ ...baseSession,
+ status: 'pending',
+ updatedAt: new Date().toISOString(),
}
- } else {
- const idx = toolMap.get(id)!
- const tc = blocks[idx].toolCall
- if (tc) {
- tc.name = name
- if (displayTitle) tc.displayTitle = displayTitle
- if (phaseLabel) tc.phaseLabel = phaseLabel
- if (args) tc.params = args
+ if (nextSession.fileId) {
+ setActiveResourceId(nextSession.fileId)
}
+ applyPreviewSessionUpdate(nextSession)
+ break
}
- flush()
- // TODO: Uncomment when rich UI for Results tab is ready
- // if (shouldOpenGenericResource(name)) {
- // if (!genericEntryMap.has(id)) {
- // const entryIdx = appendGenericEntry({
- // toolCallId: id,
- // toolName: name,
- // displayTitle: displayTitle ?? name,
- // status: 'executing',
- // params: args,
- // })
- // genericEntryMap.set(id, entryIdx)
- // const opened = addResource({ type: 'generic', id: 'results', title: 'Results' })
- // if (opened) onResourceEventRef.current?.()
- // else setActiveResourceId('results')
- // } else {
- // const entryIdx = genericEntryMap.get(id)
- // if (entryIdx !== undefined) {
- // updateGenericEntry(entryIdx, {
- // toolName: name,
- // ...(displayTitle && { displayTitle }),
- // ...(args && { params: args }),
- // })
- // }
- // }
- // }
+ if (previewPhase === 'file_preview_target') {
+ const nextSession: FilePreviewSession = {
+ ...baseSession,
+ updatedAt: new Date().toISOString(),
+ }
+ const nextState = applyPreviewSessionUpdate(nextSession)
+ const activePreview =
+ nextState.activeSessionId !== null
+ ? (nextState.sessions[nextState.activeSessionId] ?? null)
+ : null
+ if (activePreview?.id === nextSession.id) {
+ syncPreviewResourceChrome(activePreview)
+ }
+ break
+ }
- if (
- parsed.type === 'tool_call' &&
- ui?.clientExecutable &&
- isWorkflowToolName(name) &&
- !isPartial &&
- !clientExecutionStarted.has(id)
- ) {
- clientExecutionStarted.add(id)
- const args = data?.arguments ?? data?.input ?? {}
- const targetWorkflowId =
- typeof (args as Record).workflowId === 'string'
- ? ((args as Record).workflowId as string)
- : useWorkflowRegistry.getState().activeWorkflowId
- if (targetWorkflowId) {
- const meta = getWorkflowById(workspaceId, targetWorkflowId)
- const wasAdded = addResource({
- type: 'workflow',
- id: targetWorkflowId,
- title: meta?.name ?? 'Workflow',
- })
- if (!wasAdded && activeResourceIdRef.current !== targetWorkflowId) {
- setActiveResourceId(targetWorkflowId)
- }
- onResourceEventRef.current?.()
+ if (previewPhase === 'file_preview_edit_meta') {
+ const nextSession: FilePreviewSession = {
+ ...baseSession,
+ status: prevSession?.status ?? 'pending',
+ updatedAt: new Date().toISOString(),
}
- executeRunToolOnClient(id, name, args as Record)
+ applyPreviewSessionUpdate(nextSession)
+ break
}
- break
- }
- case 'tool_call_delta': {
- const id = parsed.toolCallId
- const delta = typeof parsed.data === 'string' ? parsed.data : ''
- if (!id || !delta) break
-
- const toolName = typeof parsed.toolName === 'string' ? parsed.toolName : ''
- const streamWorkspaceFile =
- activeSubagent === 'file_write' || toolName === 'workspace_file'
-
- if (streamWorkspaceFile) {
- let prev = streamingFileRef.current
- if (!prev) {
- prev = { fileName: '', content: '' }
- streamingFileRef.current = prev
- setStreamingFile(prev)
+
+ if (previewPhase === 'file_preview_content') {
+ const content = typeof payload.content === 'string' ? payload.content : ''
+ const contentMode = payload.contentMode === 'delta' ? 'delta' : 'snapshot'
+ const nextPreviewText =
+ contentMode === 'delta' ? (prevSession?.previewText ?? '') + content : content
+ const nextSession: FilePreviewSession = {
+ ...baseSession,
+ status: 'streaming',
+ previewText: nextPreviewText,
+ previewVersion: nextPreviewVersion,
+ updatedAt: new Date().toISOString(),
}
- const raw = prev.content + delta
- let fileName = prev.fileName
- if (!fileName) {
- const m = raw.match(/"fileName"\s*:\s*"([^"]+)"/)
- if (m) {
- fileName = m[1]
- }
+ applyPreviewSessionUpdate(nextSession)
+ const previewToolIdx = toolMap.get(id)
+ if (previewToolIdx !== undefined && blocks[previewToolIdx].toolCall) {
+ blocks[previewToolIdx].toolCall!.status = 'executing'
}
- const fileIdMatch = raw.match(/"fileId"\s*:\s*"([^"]+)"/)
- const matchedResourceId = fileIdMatch?.[1]
- if (
- matchedResourceId &&
- resourcesRef.current.some(
- (resource) => resource.type === 'file' && resource.id === matchedResourceId
- )
- ) {
- setActiveResourceId(matchedResourceId)
- setResources((rs) => rs.filter((resource) => resource.id !== 'streaming-file'))
- } else if (fileName || fileIdMatch) {
- const hasStreamingResource = resourcesRef.current.some(
- (resource) => resource.id === 'streaming-file'
- )
- if (!hasStreamingResource) {
- addResource({
- type: 'file',
- id: 'streaming-file',
- title: fileName || 'Writing file...',
- })
- } else if (fileName) {
- setResources((rs) =>
- rs.map((resource) =>
- resource.id === 'streaming-file'
- ? { ...resource, title: fileName }
- : resource
- )
+ break
+ }
+
+ if (previewPhase === 'file_preview_complete') {
+ const resultData = asPayloadRecord(payload.output)
+ const completedAt = new Date().toISOString()
+ const nextSession: FilePreviewSession = {
+ ...baseSession,
+ status: 'complete',
+ previewVersion:
+ typeof payload.previewVersion === 'number' &&
+ Number.isFinite(payload.previewVersion)
+ ? payload.previewVersion
+ : (prevSession?.previewVersion ?? 0),
+ updatedAt: completedAt,
+ completedAt,
+ }
+ const nextState = applyCompletedPreviewSession(nextSession)
+
+ if (fileId && resultData?.id) {
+ const fileName = (resultData.name as string) ?? nextSession.fileName ?? 'File'
+ const fileResource = { type: 'file' as const, id: fileId, title: fileName }
+ setResources((rs) => {
+ const without = rs.filter((r) => r.id !== 'streaming-file')
+ if (without.some((r) => r.type === 'file' && r.id === fileResource.id)) {
+ return without
+ }
+ return [...without, fileResource]
+ })
+ setActiveResourceId(fileId)
+ if (nextSession.previewText) {
+ queryClient.setQueryData(
+ workspaceFilesKeys.content(workspaceId, fileId, 'text'),
+ nextSession.previewText
)
}
+ invalidateResourceQueries(queryClient, workspaceId, 'file', fileId)
+ } else {
+ const activePreview =
+ nextState.activeSessionId !== null
+ ? (nextState.sessions[nextState.activeSessionId] ?? null)
+ : null
+ if (activePreview) {
+ syncPreviewResourceChrome(activePreview)
+ }
}
- const next = { fileName, content: raw }
- streamingFileRef.current = next
- setStreamingFile(next)
+ break
}
+ }
+
+ if (phase === MothershipStreamV1ToolPhase.args_delta) {
+ const delta =
+ typeof payload.argumentsDelta === 'string' ? payload.argumentsDelta : ''
+ if (!delta) break
const idx = toolMap.get(id)
if (idx !== undefined && blocks[idx].toolCall) {
const tc = blocks[idx].toolCall!
tc.streamingArgs = (tc.streamingArgs ?? '') + delta
+ const displayTitle = resolveStreamingToolDisplayTitle(tc.name, tc.streamingArgs)
+ if (displayTitle) tc.displayTitle = displayTitle
+
flush()
}
-
- // TODO: Uncomment when rich UI for Results tab is ready
- // if (toolName && shouldOpenGenericResource(toolName)) {
- // const entryIdx = genericEntryMap.get(id)
- // if (entryIdx !== undefined) {
- // const entry = genericResourceDataRef.current.entries[entryIdx]
- // if (entry) {
- // updateGenericEntry(entryIdx, {
- // streamingArgs: (entry.streamingArgs ?? '') + delta,
- // })
- // }
- // }
- // }
-
break
}
- case 'tool_result': {
- const id = parsed.toolCallId || getPayloadData(parsed)?.id
- if (!id) break
+
+ if (phase === MothershipStreamV1ToolPhase.result) {
const idx = toolMap.get(id)
- if (idx !== undefined && blocks[idx].toolCall) {
- const tc = blocks[idx].toolCall!
+ if (idx === undefined || !blocks[idx].toolCall) {
+ break
+ }
+ const tc = blocks[idx].toolCall!
+ const outputObj = asPayloadRecord(payload.output)
+ const success =
+ typeof payload.success === 'boolean'
+ ? payload.success
+ : payload.status === MothershipStreamV1ToolOutcome.success
+ const isCancelled =
+ outputObj?.reason === 'user_cancelled' ||
+ outputObj?.cancelledByUser === true ||
+ payload.reason === 'user_cancelled' ||
+ payload.cancelledByUser === true ||
+ payload.status === MothershipStreamV1ToolOutcome.cancelled
+
+ if (isCancelled) {
+ tc.status = 'cancelled'
+ tc.displayTitle = 'Stopped by user'
+ } else {
+ tc.status = success ? 'success' : 'error'
+ }
+ tc.streamingArgs = undefined
+ tc.result = {
+ success: !!success,
+ output: payload.output,
+ error: typeof payload.error === 'string' ? payload.error : undefined,
+ }
+ flush()
- const payloadData = getPayloadData(parsed)
- const resultObj =
- parsed.result && typeof parsed.result === 'object'
- ? (parsed.result as Record)
- : undefined
- const isCancelled =
- resultObj?.reason === 'user_cancelled' ||
- resultObj?.cancelledByUser === true ||
- (payloadData as Record | undefined)?.reason ===
- 'user_cancelled' ||
- (payloadData as Record | undefined)?.cancelledByUser === true
-
- if (isCancelled) {
- tc.status = 'cancelled'
- tc.displayTitle = 'Stopped by user'
- } else {
- tc.status = parsed.success ? 'success' : 'error'
- }
- tc.streamingArgs = undefined
- tc.result = {
- success: !!parsed.success,
- output: parsed.result ?? getPayloadData(parsed)?.result,
- error: (parsed.error ?? getPayloadData(parsed)?.error) as string | undefined,
+ if (tc.name === ReadTool.id && tc.status === 'success') {
+ const readArgs = toolArgsMap.get(id)
+ const resource = extractResourceFromReadResult(
+ readArgs?.path as string | undefined,
+ tc.result.output
+ )
+ if (resource && addResource(resource)) {
+ onResourceEventRef.current?.()
}
- flush()
+ }
- if (tc.name === 'read' && tc.status === 'success') {
- const readArgs = toolArgsMap.get(id)
- const resource = extractResourceFromReadResult(
- readArgs?.path as string | undefined,
- tc.result.output
- )
- if (resource && addResource(resource)) {
- onResourceEventRef.current?.()
- }
+ if (DEPLOY_TOOL_NAMES.has(tc.name) && tc.status === 'success') {
+ const output = tc.result?.output as Record | undefined
+ const deployedWorkflowId = (output?.workflowId as string) ?? undefined
+ if (deployedWorkflowId && typeof output?.isDeployed === 'boolean') {
+ queryClient.invalidateQueries({
+ queryKey: deploymentKeys.info(deployedWorkflowId),
+ })
+ queryClient.invalidateQueries({
+ queryKey: deploymentKeys.versions(deployedWorkflowId),
+ })
+ queryClient.invalidateQueries({
+ queryKey: workflowKeys.list(workspaceId),
+ })
}
+ }
- if (DEPLOY_TOOL_NAMES.has(tc.name) && tc.status === 'success') {
- const output = tc.result?.output as Record | undefined
- const deployedWorkflowId = (output?.workflowId as string) ?? undefined
- if (deployedWorkflowId && typeof output?.isDeployed === 'boolean') {
- queryClient.invalidateQueries({
- queryKey: deploymentKeys.info(deployedWorkflowId),
- })
- queryClient.invalidateQueries({
- queryKey: deploymentKeys.versions(deployedWorkflowId),
- })
- queryClient.invalidateQueries({
- queryKey: workflowKeys.list(workspaceId),
- })
- }
- }
+ if (FOLDER_TOOL_NAMES.has(tc.name) && tc.status === 'success') {
+ queryClient.invalidateQueries({
+ queryKey: folderKeys.list(workspaceId),
+ })
+ }
+ if (WORKFLOW_MUTATION_TOOL_NAMES.has(tc.name) && tc.status === 'success') {
+ queryClient.invalidateQueries({
+ queryKey: workflowKeys.list(workspaceId),
+ })
+ }
- const extractedResources =
- tc.status === 'success' && isResourceToolName(tc.name)
- ? extractResourcesFromToolResult(
- tc.name,
- toolArgsMap.get(id) as Record | undefined,
- tc.result?.output
- )
- : []
-
- for (const resource of extractedResources) {
- invalidateResourceQueries(queryClient, workspaceId, resource.type, resource.id)
- }
+ const extractedResources =
+ tc.status === 'success' && isResourceToolName(tc.name)
+ ? extractResourcesFromToolResult(
+ tc.name,
+ toolArgsMap.get(id) as Record | undefined,
+ tc.result?.output
+ )
+ : []
- onToolResultRef.current?.(tc.name, tc.status === 'success', tc.result?.output)
-
- if (tc.name === 'workspace_file') {
- setStreamingFile(null)
- streamingFileRef.current = null
-
- const fileResource = extractedResources.find((r) => r.type === 'file')
- if (fileResource) {
- setResources((rs) => {
- const without = rs.filter((r) => r.id !== 'streaming-file')
- if (without.some((r) => r.type === 'file' && r.id === fileResource.id)) {
- return without
- }
- return [...without, fileResource]
- })
- setActiveResourceId(fileResource.id)
- } else {
- setResources((rs) => rs.filter((r) => r.id !== 'streaming-file'))
- }
- }
+ for (const resource of extractedResources) {
+ invalidateResourceQueries(queryClient, workspaceId, resource.type, resource.id)
+ }
- // TODO: Uncomment when rich UI for Results tab is ready
- // if (
- // shouldOpenGenericResource(tc.name) ||
- // (isDeferredResourceTool(tc.name) && extractedResources.length === 0)
- // ) {
- // const entryIdx = genericEntryMap.get(id)
- // if (entryIdx !== undefined) {
- // updateGenericEntry(entryIdx, {
- // status: tc.status,
- // result: tc.result ?? undefined,
- // streamingArgs: undefined,
- // })
- // } else {
- // const newIdx = appendGenericEntry({
- // toolCallId: id,
- // toolName: tc.name,
- // displayTitle: tc.displayTitle ?? tc.name,
- // status: tc.status,
- // params: toolArgsMap.get(id) as Record | undefined,
- // result: tc.result ?? undefined,
- // })
- // genericEntryMap.set(id, newIdx)
- // if (addResource({ type: 'generic', id: 'results', title: 'Results' })) {
- // onResourceEventRef.current?.()
- // }
- // }
- // }
+ onToolResultRef.current?.(tc.name, tc.status === 'success', tc.result?.output)
+
+ if (isWorkflowToolName(tc.name)) {
+ clientExecutionStartedRef.current.delete(id)
}
- break
- }
- case 'resource_added': {
- const resource = parsed.resource
- if (resource?.type && resource?.id) {
- const wasAdded = addResource(resource)
- invalidateResourceQueries(queryClient, workspaceId, resource.type, resource.id)
+ const workspaceFileOperation =
+ tc.name === WorkspaceFile.id && typeof tc.params?.operation === 'string'
+ ? tc.params.operation
+ : undefined
+ const shouldKeepWorkspacePreviewOpen =
+ tc.name === WorkspaceFile.id &&
+ (workspaceFileOperation === 'append' ||
+ workspaceFileOperation === 'update' ||
+ workspaceFileOperation === 'patch')
- if (!wasAdded && activeResourceIdRef.current !== resource.id) {
- setActiveResourceId(resource.id)
+ if (
+ (tc.name === WorkspaceFile.id || tc.name === 'edit_content') &&
+ !shouldKeepWorkspacePreviewOpen
+ ) {
+ if (tc.name === WorkspaceFile.id) {
+ removePreviewSessionImmediate(id)
}
- onResourceEventRef.current?.()
-
- if (resource.type === 'workflow') {
- const wasRegistered = ensureWorkflowInRegistry(
- resource.id,
- resource.title,
- workspaceId
- )
- if (wasAdded && wasRegistered) {
- useWorkflowRegistry.getState().setActiveWorkflow(resource.id)
- } else {
- useWorkflowRegistry.getState().loadWorkflowState(resource.id)
- }
+ const fileResource = extractedResources.find((r) => r.type === 'file')
+ if (fileResource) {
+ setResources((rs) => {
+ const without = rs.filter((r) => r.id !== 'streaming-file')
+ if (without.some((r) => r.type === 'file' && r.id === fileResource.id)) {
+ return without
+ }
+ return [...without, fileResource]
+ })
+ setActiveResourceId(fileResource.id)
+ invalidateResourceQueries(queryClient, workspaceId, 'file', fileResource.id)
+ } else if (!activeSubagent || activeSubagent !== FILE_SUBAGENT_ID) {
+ setResources((rs) => rs.filter((r) => r.id !== 'streaming-file'))
}
}
break
}
- case 'resource_deleted': {
- const resource = parsed.resource
- if (resource?.type && resource?.id) {
- removeResource(resource.type as MothershipResourceType, resource.id)
- invalidateResourceQueries(
- queryClient,
- workspaceId,
- resource.type as MothershipResourceType,
- resource.id
- )
- onResourceEventRef.current?.()
+
+ const name =
+ typeof payload.toolName === 'string'
+ ? payload.toolName
+ : typeof payload.name === 'string'
+ ? payload.name
+ : 'unknown'
+ const isPartial = payload.partial === true
+ if (name === ToolSearchToolRegex.id || isToolHiddenInUi(name)) {
+ break
+ }
+ const ui = getToolUI(payload)
+ if (ui?.hidden) break
+ let displayTitle = ui?.title || ui?.phaseLabel
+ const phaseLabel = ui?.phaseLabel
+ const args = (asPayloadRecord(payload.arguments) ?? asPayloadRecord(payload.input)) as
+ | Record
+ | undefined
+
+ displayTitle = resolveToolDisplayTitle(name, args) ?? displayTitle
+
+ if (name === 'edit_content') {
+ const parentToolCallId =
+ activePreviewSessionIdRef.current ?? previewSessionRef.current?.toolCallId
+ const parentIdx =
+ parentToolCallId !== null && parentToolCallId !== undefined
+ ? toolMap.get(parentToolCallId)
+ : undefined
+ if (parentIdx !== undefined && blocks[parentIdx].toolCall) {
+ toolMap.set(id, parentIdx)
+ const tc = blocks[parentIdx].toolCall!
+ tc.status = 'executing'
+ tc.result = undefined
+ flush()
+ break
}
+ }
+
+ if (!toolMap.has(id)) {
+ toolMap.set(id, blocks.length)
+ blocks.push({
+ type: 'tool_call',
+ toolCall: {
+ id,
+ name,
+ status: 'executing',
+ displayTitle,
+ phaseLabel,
+ params: args,
+ calledBy: activeSubagent,
+ },
+ })
+ if (name === ReadTool.id || isResourceToolName(name)) {
+ if (args) toolArgsMap.set(id, args)
+ }
+ } else {
+ const idx = toolMap.get(id)!
+ const tc = blocks[idx].toolCall
+ if (tc) {
+ tc.name = name
+ if (displayTitle) tc.displayTitle = displayTitle
+ if (phaseLabel) tc.phaseLabel = phaseLabel
+ if (args) tc.params = args
+ }
+ }
+ flush()
+
+ if (isWorkflowToolName(name) && !isPartial) {
+ startClientWorkflowTool(id, name, args ?? {})
+ }
+ break
+ }
+ case MothershipStreamV1EventType.resource: {
+ const payload = getPayloadData(parsed)
+ const resource = asPayloadRecord(payload.resource)
+ if (
+ !resource ||
+ typeof resource.type !== 'string' ||
+ typeof resource.id !== 'string'
+ ) {
break
}
- case 'context_compaction_start': {
+
+ if (payload.op === MothershipStreamV1ResourceOp.remove) {
+ removeResource(resource.type as MothershipResourceType, resource.id)
+ invalidateResourceQueries(
+ queryClient,
+ workspaceId,
+ resource.type as MothershipResourceType,
+ resource.id
+ )
+ onResourceEventRef.current?.()
+ break
+ }
+
+ const nextResource = {
+ type: resource.type as MothershipResourceType,
+ id: resource.id,
+ title: typeof resource.title === 'string' ? resource.title : resource.id,
+ }
+ const wasAdded = addResource(nextResource)
+ invalidateResourceQueries(
+ queryClient,
+ workspaceId,
+ nextResource.type,
+ nextResource.id
+ )
+
+ if (!wasAdded && activeResourceIdRef.current !== nextResource.id) {
+ setActiveResourceId(nextResource.id)
+ }
+ onResourceEventRef.current?.()
+
+ if (nextResource.type === 'workflow') {
+ const wasRegistered = ensureWorkflowInRegistry(
+ nextResource.id,
+ nextResource.title,
+ workspaceId
+ )
+ if (wasAdded && wasRegistered) {
+ useWorkflowRegistry.getState().setActiveWorkflow(nextResource.id)
+ } else {
+ useWorkflowRegistry.getState().loadWorkflowState(nextResource.id)
+ }
+ }
+ break
+ }
+ case MothershipStreamV1EventType.run: {
+ const payload = getPayloadData(parsed)
+ const kind = typeof payload.kind === 'string' ? payload.kind : ''
+ if (kind === MothershipStreamV1RunKind.compaction_start) {
const compactionId = `compaction_${Date.now()}`
activeCompactionId = compactionId
toolMap.set(compactionId, blocks.length)
@@ -1547,9 +1894,7 @@ export function useChat(
},
})
flush()
- break
- }
- case 'context_compaction': {
+ } else if (kind === MothershipStreamV1RunKind.compaction_done) {
const compactionId = activeCompactionId || `compaction_${Date.now()}`
activeCompactionId = undefined
const idx = toolMap.get(compactionId)
@@ -1569,88 +1914,370 @@ export function useChat(
})
}
flush()
- break
}
- case 'tool_error': {
- const id = parsed.toolCallId || getPayloadData(parsed)?.id
- if (!id) break
- const idx = toolMap.get(id)
- if (idx !== undefined && blocks[idx].toolCall) {
- const toolCallName = blocks[idx].toolCall!.name
- blocks[idx].toolCall!.status = 'error'
- if (toolCallName === 'workspace_file') {
- setStreamingFile(null)
- streamingFileRef.current = null
- setResources((rs) => rs.filter((resource) => resource.id !== 'streaming-file'))
- }
- flush()
-
- // TODO: Uncomment when rich UI for Results tab is ready
- // if (toolCallName && shouldOpenGenericResource(toolCallName)) {
- // const entryIdx = genericEntryMap.get(id)
- // if (entryIdx !== undefined) {
- // updateGenericEntry(entryIdx, { status: 'error', streamingArgs: undefined })
- // }
- // }
- }
+ break
+ }
+ case MothershipStreamV1EventType.span: {
+ const payload = getPayloadData(parsed)
+ const kind = typeof payload.kind === 'string' ? payload.kind : ''
+ if (kind !== MothershipStreamV1SpanPayloadKind.subagent) {
break
}
- case 'subagent_start': {
- const name = parsed.subagent || getPayloadData(parsed)?.agent
- if (name) {
- activeSubagent = name
+ const spanEvent = typeof payload.event === 'string' ? payload.event : ''
+ const spanData = asPayloadRecord(payload.data)
+ const parentToolCallId =
+ typeof parsed.scope?.parentToolCallId === 'string'
+ ? parsed.scope.parentToolCallId
+ : typeof spanData?.tool_call_id === 'string'
+ ? spanData.tool_call_id
+ : undefined
+ const isPendingPause = spanData?.pending === true
+ const name =
+ typeof payload.agent === 'string'
+ ? payload.agent
+ : typeof parsed.scope?.agentId === 'string'
+ ? parsed.scope.agentId
+ : undefined
+ if (spanEvent === MothershipStreamV1SpanLifecycleEvent.start && name) {
+ const isSameActiveSubagent =
+ activeSubagent === name &&
+ activeSubagentParentToolCallId &&
+ parentToolCallId === activeSubagentParentToolCallId
+ activeSubagent = name
+ activeSubagentParentToolCallId = parentToolCallId
+ if (!isSameActiveSubagent) {
blocks.push({ type: 'subagent', content: name })
- if (name === 'file_write') {
- const emptyFile = { fileName: '', content: '' }
- // Ref must be updated synchronously: tool_call_delta can arrive before React
- // re-renders after setStreamingFile, and the handler only appends when prev exists.
- streamingFileRef.current = emptyFile
- setStreamingFile(emptyFile)
+ }
+ if (name === FILE_SUBAGENT_ID && !isSameActiveSubagent) {
+ applyPreviewSessionUpdate({
+ schemaVersion: 1,
+ id: parentToolCallId || 'file-preview',
+ streamId: streamIdRef.current ?? '',
+ toolCallId: parentToolCallId || 'file-preview',
+ status: 'pending',
+ fileName: '',
+ previewText: '',
+ previewVersion: 0,
+ updatedAt: new Date().toISOString(),
+ })
+ }
+ flush()
+ } else if (spanEvent === MothershipStreamV1SpanLifecycleEvent.end) {
+ if (isPendingPause) {
+ break
+ }
+ if (previewSessionRef.current && !activePreviewSessionIdRef.current) {
+ const lastFileResource = resourcesRef.current.find(
+ (r) => r.type === 'file' && r.id !== 'streaming-file'
+ )
+ setResources((rs) => rs.filter((r) => r.id !== 'streaming-file'))
+ if (lastFileResource) {
+ setActiveResourceId(lastFileResource.id)
}
- flush()
}
- break
- }
- case 'subagent_end': {
activeSubagent = undefined
+ activeSubagentParentToolCallId = undefined
blocks.push({ type: 'subagent_end' })
flush()
- break
- }
- case 'title_updated': {
- queryClient.invalidateQueries({
- queryKey: taskKeys.list(workspaceId),
- })
- onTitleUpdateRef.current?.()
- break
- }
- case 'error': {
- sawStreamError = true
- setError(parsed.error || 'An error occurred')
- appendInlineErrorTag(buildInlineErrorTag(parsed))
- break
- }
- case 'done': {
- sawDoneEvent = true
- break
}
+ break
+ }
+ case MothershipStreamV1EventType.error: {
+ const payload = getPayloadData(parsed)
+ sawStreamError = true
+ setError(
+ (typeof payload.message === 'string' ? payload.message : undefined) ||
+ (typeof payload.error === 'string' ? payload.error : undefined) ||
+ 'An error occurred'
+ )
+ appendInlineErrorTag(buildInlineErrorTag(parsed))
+ break
+ }
+ case MothershipStreamV1EventType.complete: {
+ sawCompleteEvent = true
+ // `complete` is terminal for this stream, even if the transport takes a moment
+ // longer to close.
+ break readLoop
+ }
+ }
+ }
+ } finally {
+ if (streamReaderRef.current === reader) {
+ streamReaderRef.current = null
+ }
+ }
+ return { sawStreamError, sawComplete: sawCompleteEvent }
+ },
+ [
+ workspaceId,
+ queryClient,
+ addResource,
+ removeResource,
+ applyPreviewSessionUpdate,
+ applyCompletedPreviewSession,
+ removePreviewSessionImmediate,
+ syncPreviewResourceChrome,
+ ]
+ )
+ processSSEStreamRef.current = processSSEStream
+
+ const getActiveStreamIdForChat = useCallback(
+ async (chatId: string): Promise => {
+ const cached = queryClient.getQueryData(taskKeys.detail(chatId))
+ if (cached?.activeStreamId) {
+ return cached.activeStreamId
+ }
+
+ try {
+ const history = await fetchChatHistory(chatId)
+ queryClient.setQueryData(taskKeys.detail(chatId), history)
+ return history.activeStreamId ?? null
+ } catch (error) {
+ logger.warn('Failed to load chat history while recovering stream', {
+ chatId,
+ error: error instanceof Error ? error.message : String(error),
+ })
+ return null
+ }
+ },
+ [queryClient]
+ )
+
+ const fetchStreamBatch = useCallback(
+ async (
+ streamId: string,
+ afterCursor: string,
+ signal?: AbortSignal
+ ): Promise => {
+ const response = await fetch(
+ `/api/copilot/chat/stream?streamId=${encodeURIComponent(streamId)}&after=${encodeURIComponent(afterCursor)}&batch=true`,
+ { signal }
+ )
+ if (!response.ok) {
+ throw new Error(`Stream resume batch failed: ${response.status}`)
+ }
+ const batch = (await response.json()) as StreamBatchResponse
+ if (Array.isArray(batch.previewSessions) && batch.previewSessions.length > 0) {
+ seedPreviewSessions(batch.previewSessions)
+ }
+ return batch
+ },
+ [seedPreviewSessions]
+ )
+
+ const attachToExistingStream = useCallback(
+ async (opts: {
+ streamId: string
+ assistantId: string
+ expectedGen: number
+ initialBatch?: StreamBatchResponse | null
+ afterCursor?: string
+ }): Promise<{ error: boolean; aborted: boolean }> => {
+ const { streamId, assistantId, expectedGen, afterCursor = '0' } = opts
+ let latestCursor = afterCursor
+ let seedEvents = opts.initialBatch?.events ?? []
+ let streamStatus = opts.initialBatch?.status ?? 'unknown'
+
+ setIsSending(true)
+ setIsReconnecting(true)
+ setError(null)
+
+ try {
+ while (streamGenRef.current === expectedGen) {
+ if (seedEvents.length > 0) {
+ const replayResult = await processSSEStreamRef.current(
+ buildReplayStream(seedEvents).getReader(),
+ assistantId,
+ expectedGen,
+ { preserveExistingState: true }
+ )
+ latestCursor = String(seedEvents[seedEvents.length - 1]?.eventId ?? latestCursor)
+ lastCursorRef.current = latestCursor
+ seedEvents = []
+
+ if (replayResult.sawStreamError) {
+ return { error: true, aborted: false }
+ }
+ }
+
+ if (isTerminalStreamStatus(streamStatus)) {
+ if (streamStatus === 'error') {
+ setError(RECONNECT_TAIL_ERROR)
}
+ return { error: streamStatus === 'error', aborted: false }
+ }
+
+ const activeAbort = abortControllerRef.current
+ if (!activeAbort || activeAbort.signal.aborted) {
+ return { error: false, aborted: true }
+ }
+
+ logger.info('Opening live stream tail', { streamId, afterCursor: latestCursor })
+
+ const sseRes = await fetch(
+ `/api/copilot/chat/stream?streamId=${encodeURIComponent(streamId)}&after=${encodeURIComponent(latestCursor)}`,
+ { signal: activeAbort.signal }
+ )
+ if (!sseRes.ok || !sseRes.body) {
+ throw new Error(RECONNECT_TAIL_ERROR)
+ }
+
+ setIsReconnecting(false)
+
+ const liveResult = await processSSEStreamRef.current(
+ sseRes.body.getReader(),
+ assistantId,
+ expectedGen,
+ { preserveExistingState: true }
+ )
+
+ if (liveResult.sawStreamError) {
+ return { error: true, aborted: false }
+ }
+
+ if (liveResult.sawComplete) {
+ return { error: false, aborted: false }
}
+
+ setIsReconnecting(true)
+
+ latestCursor = lastCursorRef.current || latestCursor
+
+ logger.warn('Live stream ended without terminal event, fetching batch', {
+ streamId,
+ latestCursor,
+ })
+
+ const batch = await fetchStreamBatch(streamId, latestCursor, activeAbort.signal)
+ seedEvents = batch.events
+ streamStatus = batch.status
+
+ if (batch.events.length > 0) {
+ latestCursor = String(batch.events[batch.events.length - 1].eventId)
+ lastCursorRef.current = latestCursor
+ }
+
+ if (batch.events.length === 0 && !isTerminalStreamStatus(batch.status)) {
+ if (activeAbort.signal.aborted || streamGenRef.current !== expectedGen) {
+ return { error: false, aborted: true }
+ }
+ }
+ }
+
+ return { error: false, aborted: true }
+ } catch (err) {
+ if (err instanceof Error && err.name === 'AbortError') {
+ return { error: false, aborted: true }
+ }
+ throw err
+ } finally {
+ setIsReconnecting(false)
+ }
+ },
+ [fetchStreamBatch]
+ )
+ attachToExistingStreamRef.current = attachToExistingStream
+
+ const resumeOrFinalize = useCallback(
+ async (opts: {
+ streamId: string
+ assistantId: string
+ gen: number
+ afterCursor: string
+ signal?: AbortSignal
+ }): Promise => {
+ const { streamId, assistantId, gen, afterCursor, signal } = opts
+
+ const batch = await fetchStreamBatch(streamId, afterCursor, signal)
+ if (streamGenRef.current !== gen) return
+
+ if (isTerminalStreamStatus(batch.status)) {
+ if (batch.events.length > 0) {
+ await processSSEStreamRef.current(
+ buildReplayStream(batch.events).getReader(),
+ assistantId,
+ gen,
+ { preserveExistingState: true }
+ )
+ }
+ finalizeRef.current(batch.status === 'error' ? { error: true } : undefined)
+ return
+ }
+
+ const reconnectResult = await attachToExistingStream({
+ streamId,
+ assistantId,
+ expectedGen: gen,
+ initialBatch: batch,
+ afterCursor:
+ batch.events.length > 0
+ ? String(batch.events[batch.events.length - 1].eventId)
+ : afterCursor,
+ })
+
+ if (streamGenRef.current === gen && !reconnectResult.aborted) {
+ finalizeRef.current(reconnectResult.error ? { error: true } : undefined)
+ }
+ },
+ [fetchStreamBatch, attachToExistingStream]
+ )
+
+ const retryReconnect = useCallback(
+ async (opts: { streamId: string; assistantId: string; gen: number }): Promise => {
+ const { streamId, assistantId, gen } = opts
+
+ for (let attempt = 0; attempt <= MAX_RECONNECT_ATTEMPTS; attempt++) {
+ if (streamGenRef.current !== gen) return true
+ if (abortControllerRef.current?.signal.aborted) return true
+
+ if (attempt > 0) {
+ const delayMs = Math.min(
+ RECONNECT_BASE_DELAY_MS * 2 ** (attempt - 1),
+ RECONNECT_MAX_DELAY_MS
+ )
+ logger.warn('Reconnect attempt', {
+ streamId,
+ attempt,
+ maxAttempts: MAX_RECONNECT_ATTEMPTS,
+ delayMs,
+ })
+ setIsReconnecting(true)
+ await new Promise((resolve) => setTimeout(resolve, delayMs))
+ if (streamGenRef.current !== gen) return true
+ if (abortControllerRef.current?.signal.aborted) return true
}
- } finally {
- if (streamReaderRef.current === reader) {
- streamReaderRef.current = null
+
+ try {
+ await resumeOrFinalize({
+ streamId,
+ assistantId,
+ gen,
+ afterCursor: lastCursorRef.current || '0',
+ signal: abortControllerRef.current?.signal,
+ })
+ if (streamGenRef.current !== gen) return true
+ if (abortControllerRef.current?.signal.aborted) return true
+ if (!sendingRef.current) return true
+ } catch (err) {
+ if (err instanceof Error && err.name === 'AbortError') return true
+ logger.warn('Reconnect attempt failed', {
+ streamId,
+ attempt: attempt + 1,
+ error: err instanceof Error ? err.message : String(err),
+ })
}
}
- return {
- sawStreamError,
- sawDoneEvent,
- lastEventId,
- }
+
+ logger.error('All reconnect attempts exhausted', {
+ streamId,
+ maxAttempts: MAX_RECONNECT_ATTEMPTS,
+ })
+ setIsReconnecting(false)
+ return false
},
- [workspaceId, queryClient, addResource, removeResource]
+ [resumeOrFinalize]
)
- processSSEStreamRef.current = processSSEStream
+ retryReconnectRef.current = retryReconnect
const persistPartialResponse = useCallback(async () => {
const chatId = chatIdRef.current
@@ -1659,7 +2286,7 @@ export function useChat(
const content = streamingContentRef.current
- const storedBlocks: TaskStoredContentBlock[] = streamingBlocksRef.current.map((block) => {
+ const storedBlocks = streamingBlocksRef.current.map((block) => {
if (block.type === 'tool_call' && block.toolCall) {
const isCancelled =
block.toolCall.status === 'executing' || block.toolCall.status === 'cancelled'
@@ -1669,7 +2296,7 @@ export function useChat(
toolCall: {
id: block.toolCall.id,
name: block.toolCall.name,
- state: isCancelled ? 'cancelled' : block.toolCall.status,
+ state: isCancelled ? MothershipStreamV1ToolOutcome.cancelled : block.toolCall.status,
params: block.toolCall.params,
result: block.toolCall.result,
display: {
@@ -1683,7 +2310,7 @@ export function useChat(
})
if (storedBlocks.length > 0) {
- storedBlocks.push({ type: 'stopped' })
+ storedBlocks.push({ type: 'stopped', content: undefined })
}
try {
@@ -1697,12 +2324,17 @@ export function useChat(
...(storedBlocks.length > 0 && { contentBlocks: storedBlocks }),
}),
})
- if (res.ok) {
- streamingContentRef.current = ''
- streamingBlocksRef.current = []
+ if (!res.ok) {
+ const payload = await res.json().catch(() => null)
+ throw new Error(
+ typeof payload?.error === 'string' ? payload.error : 'Failed to persist partial response'
+ )
}
+ streamingContentRef.current = ''
+ streamingBlocksRef.current = []
} catch (err) {
logger.warn('Failed to persist partial response', err)
+ throw err instanceof Error ? err : new Error('Failed to persist partial response')
}
}, [])
@@ -1718,19 +2350,11 @@ export function useChat(
const messagesRef = useRef(messages)
messagesRef.current = messages
- const visibleMessageQueue = useMemo(
- () =>
- pendingRecoveryMessage
- ? [
- pendingRecoveryMessage,
- ...messageQueue.filter((msg) => msg.id !== pendingRecoveryMessage.id),
- ]
- : messageQueue,
- [messageQueue, pendingRecoveryMessage]
- )
const finalize = useCallback(
(options?: { error?: boolean }) => {
+ reconcileTerminalPreviewSessions()
+ locallyTerminalStreamIdRef.current = streamIdRef.current
sendingRef.current = false
setIsSending(false)
setIsReconnecting(false)
@@ -1744,147 +2368,24 @@ export function useChat(
}
}
- if (options?.error) {
- pendingRecoveryMessageRef.current = null
- setPendingRecoveryMessage(null)
- setMessageQueue([])
- return
- }
-
- const recoveryMessage = pendingRecoveryMessageRef.current
- if (recoveryMessage) {
- setPendingRecoveryMessage(null)
- const gen = streamGenRef.current
- queueMicrotask(() => {
- if (streamGenRef.current !== gen) return
- sendMessageRef.current(
- recoveryMessage.content,
- recoveryMessage.fileAttachments,
- recoveryMessage.contexts
- )
- })
+ if (options?.error || manualQueueSendIdRef.current) {
return
}
const next = messageQueueRef.current[0]
if (next) {
setMessageQueue((prev) => prev.filter((m) => m.id !== next.id))
- const gen = streamGenRef.current
- queueMicrotask(() => {
- if (streamGenRef.current !== gen) return
- sendMessageRef.current(next.content, next.fileAttachments, next.contexts)
- })
+ void sendMessageRef.current(next.content, next.fileAttachments, next.contexts)
}
},
- [invalidateChatQueries]
+ [invalidateChatQueries, reconcileTerminalPreviewSessions]
)
finalizeRef.current = finalize
- const resumeOrFinalize = useCallback(
- async (opts: {
- streamId: string
- assistantId: string
- gen: number
- fromEventId: number
- snapshot?: StreamSnapshot | null
- signal?: AbortSignal
- }): Promise => {
- const { streamId, assistantId, gen, fromEventId, snapshot, signal } = opts
-
- const batch =
- snapshot ??
- (await (async () => {
- const b = await fetchStreamBatch(streamId, fromEventId, signal)
- if (streamGenRef.current !== gen) return null
- return { events: b.events, status: b.status } as StreamSnapshot
- })())
-
- if (!batch || streamGenRef.current !== gen) return
-
- if (isTerminalStreamStatus(batch.status)) {
- finalize(batch.status === 'error' ? { error: true } : undefined)
- return
- }
-
- const reconnectResult = await attachToExistingStream({
- streamId,
- assistantId,
- expectedGen: gen,
- snapshot: batch,
- initialLastEventId: batch.events[batch.events.length - 1]?.eventId ?? fromEventId,
- })
-
- if (streamGenRef.current === gen && !reconnectResult.aborted) {
- finalize(reconnectResult.error ? { error: true } : undefined)
- }
- },
- [fetchStreamBatch, attachToExistingStream, finalize]
- )
-
- const retryReconnect = useCallback(
- async (opts: {
- streamId: string
- assistantId: string
- gen: number
- initialSnapshot?: StreamSnapshot | null
- }): Promise => {
- const { streamId, assistantId, gen, initialSnapshot } = opts
-
- for (let attempt = 0; attempt <= MAX_RECONNECT_ATTEMPTS; attempt++) {
- if (streamGenRef.current !== gen) return true
- if (abortControllerRef.current?.signal.aborted) return true
-
- if (attempt > 0) {
- const delayMs = Math.min(
- RECONNECT_BASE_DELAY_MS * 2 ** (attempt - 1),
- RECONNECT_MAX_DELAY_MS
- )
- logger.warn('Reconnect attempt', {
- streamId,
- attempt,
- maxAttempts: MAX_RECONNECT_ATTEMPTS,
- delayMs,
- })
- setIsReconnecting(true)
- await new Promise((resolve) => setTimeout(resolve, delayMs))
- if (streamGenRef.current !== gen) return true
- if (abortControllerRef.current?.signal.aborted) return true
- }
-
- try {
- await resumeOrFinalize({
- streamId,
- assistantId,
- gen,
- fromEventId: lastEventIdRef.current,
- snapshot: attempt === 0 ? initialSnapshot : undefined,
- signal: abortControllerRef.current?.signal,
- })
- return true
- } catch (err) {
- if (err instanceof Error && err.name === 'AbortError') return true
- logger.warn('Reconnect attempt failed', {
- streamId,
- attempt: attempt + 1,
- error: err instanceof Error ? err.message : String(err),
- })
- }
- }
-
- logger.error('All reconnect attempts exhausted', {
- streamId,
- maxAttempts: MAX_RECONNECT_ATTEMPTS,
- })
- setIsReconnecting(false)
- return false
- },
- [resumeOrFinalize]
- )
- retryReconnectRef.current = retryReconnect
-
const sendMessage = useCallback(
async (message: string, fileAttachments?: FileAttachmentForApi[], contexts?: ChatContext[]) => {
if (!message.trim() || !workspaceId) return
+ const pendingStop = pendingStopPromiseRef.current
if (sendingRef.current) {
const queued: QueuedMessage = {
@@ -1902,16 +2403,16 @@ export function useChat(
setError(null)
setIsSending(true)
sendingRef.current = true
+ locallyTerminalStreamIdRef.current = undefined
const userMessageId = generateId()
const assistantId = generateId()
pendingUserMsgRef.current = { id: userMessageId, content: message }
streamIdRef.current = userMessageId
- lastEventIdRef.current = 0
- clientExecutionStartedRef.current.clear()
+ lastCursorRef.current = '0'
- const storedAttachments: TaskStoredFileAttachment[] | undefined =
+ const storedAttachments: PersistedFileAttachment[] | undefined =
fileAttachments && fileAttachments.length > 0
? fileAttachments.map((f) => ({
id: f.id,
@@ -1923,30 +2424,6 @@ export function useChat(
: undefined
const requestChatId = selectedChatIdRef.current ?? chatIdRef.current
- const previousChatHistory = requestChatId
- ? queryClient.getQueryData(taskKeys.detail(requestChatId))
- : undefined
- if (requestChatId) {
- const cachedUserMsg: TaskStoredMessage = {
- id: userMessageId,
- role: 'user' as const,
- content: message,
- ...(storedAttachments && { fileAttachments: storedAttachments }),
- }
- queryClient.setQueryData(taskKeys.detail(requestChatId), (old) => {
- return old
- ? {
- ...old,
- messages: [...old.messages, cachedUserMsg],
- activeStreamId: userMessageId,
- }
- : undefined
- })
- }
-
- const userAttachments = storedAttachments?.map(toDisplayAttachment)
- const previousMessages = messagesRef.current
-
const messageContexts = contexts?.map((c) => ({
kind: c.kind,
label: c.label,
@@ -1956,23 +2433,101 @@ export function useChat(
...('fileId' in c && c.fileId ? { fileId: c.fileId } : {}),
...('folderId' in c && c.folderId ? { folderId: c.folderId } : {}),
}))
+ const cachedUserMsg: PersistedMessage = {
+ id: userMessageId,
+ role: 'user' as const,
+ content: message,
+ timestamp: new Date().toISOString(),
+ ...(storedAttachments && { fileAttachments: storedAttachments }),
+ ...(messageContexts && messageContexts.length > 0 ? { contexts: messageContexts } : {}),
+ }
- setMessages((prev) => [
- ...prev,
- {
- id: userMessageId,
- role: 'user',
- content: message,
- attachments: userAttachments,
- ...(messageContexts && messageContexts.length > 0 ? { contexts: messageContexts } : {}),
- },
- { id: assistantId, role: 'assistant', content: '', contentBlocks: [] },
- ])
+ const userAttachments = storedAttachments?.map((f) => ({
+ id: f.id,
+ filename: f.filename,
+ media_type: f.media_type,
+ size: f.size,
+ previewUrl: f.media_type.startsWith('image/')
+ ? `/api/files/serve/${encodeURIComponent(f.key)}?context=mothership`
+ : undefined,
+ }))
+
+ const optimisticUserMessage: ChatMessage = {
+ id: userMessageId,
+ role: 'user',
+ content: message,
+ attachments: userAttachments,
+ ...(messageContexts && messageContexts.length > 0 ? { contexts: messageContexts } : {}),
+ }
+ const optimisticAssistantMessage: ChatMessage = {
+ id: assistantId,
+ role: 'assistant',
+ content: '',
+ contentBlocks: [],
+ }
+
+ const applyOptimisticSend = () => {
+ if (requestChatId) {
+ queryClient.setQueryData(taskKeys.detail(requestChatId), (old) => {
+ if (!old) return undefined
+ const nextMessages = old.messages.filter((m) => m.id !== userMessageId)
+ return {
+ ...old,
+ resources: old.resources.filter((r) => r.id !== 'streaming-file'),
+ messages: [...nextMessages, cachedUserMsg],
+ activeStreamId: userMessageId,
+ }
+ })
+ }
+
+ setMessages((prev) => {
+ const nextMessages = prev.filter((m) => m.id !== userMessageId && m.id !== assistantId)
+ return [...nextMessages, optimisticUserMessage, optimisticAssistantMessage]
+ })
+ }
+
+ const rollbackOptimisticSend = () => {
+ if (requestChatId) {
+ queryClient.setQueryData(taskKeys.detail(requestChatId), (old) => {
+ if (!old) return undefined
+ return {
+ ...old,
+ messages: old.messages.filter((m) => m.id !== userMessageId),
+ activeStreamId: old.activeStreamId === userMessageId ? null : old.activeStreamId,
+ }
+ })
+ }
+
+ setMessages((prev) => prev.filter((m) => m.id !== userMessageId && m.id !== assistantId))
+ }
+
+ applyOptimisticSend()
const abortController = new AbortController()
abortControllerRef.current = abortController
try {
+ if (pendingStop) {
+ try {
+ await pendingStop
+ // Query invalidation from the stop barrier can briefly stomp the optimistic tail.
+ // Re-apply it before the real POST so the mothership UI stays immediate.
+ applyOptimisticSend()
+ } catch (err) {
+ rollbackOptimisticSend()
+ pendingUserMsgRef.current = null
+ if (streamIdRef.current === userMessageId) {
+ streamIdRef.current = undefined
+ }
+ abortControllerRef.current = null
+ sendingRef.current = false
+ setIsSending(false)
+ setIsReconnecting(false)
+ setError(err instanceof Error ? err.message : 'Failed to stop the previous response')
+ return
+ }
+ }
+
const currentActiveId = activeResourceIdRef.current
const currentResources = resourcesRef.current
const resourceAttachments =
@@ -2005,86 +2560,55 @@ export function useChat(
if (!response.ok) {
const errorData = await response.json().catch(() => ({}))
+ if (response.status === 409) {
+ const conflictStreamId =
+ typeof errorData.activeStreamId === 'string'
+ ? errorData.activeStreamId
+ : userMessageId
+ streamIdRef.current = conflictStreamId
+ const succeeded = await retryReconnect({
+ streamId: conflictStreamId,
+ assistantId,
+ gen,
+ })
+ if (succeeded) return
+ if (streamGenRef.current === gen) {
+ finalize({ error: true })
+ }
+ return
+ }
throw new Error(errorData.error || `Request failed: ${response.status}`)
}
if (!response.body) throw new Error('No response body')
- const termination = await processSSEStream(response.body.getReader(), assistantId, {
- expectedGen: gen,
- })
+ const streamResult = await processSSEStream(response.body.getReader(), assistantId, gen)
if (streamGenRef.current === gen) {
- if (termination.sawStreamError) {
+ if (streamResult.sawStreamError) {
finalize({ error: true })
return
}
+ // A live SSE `complete` event is already terminal. Finalize immediately so follow-up
+ // sends do not get spuriously queued behind an already-finished response.
+ if (streamResult.sawComplete) {
+ finalize()
+ return
+ }
+
await resumeOrFinalize({
- streamId: userMessageId,
+ streamId: streamIdRef.current || userMessageId,
assistantId,
gen,
- fromEventId: termination.lastEventId,
+ afterCursor: lastCursorRef.current || '0',
signal: abortController.signal,
})
+ if (streamGenRef.current === gen && sendingRef.current) {
+ finalize()
+ }
}
} catch (err) {
if (err instanceof Error && err.name === 'AbortError') return
- const errorMessage = err instanceof Error ? err.message : 'Failed to send message'
- if (requestChatId && isActiveStreamConflictError(errorMessage)) {
- logger.info('Active stream conflict detected while sending message; reattaching', {
- chatId: requestChatId,
- attemptedStreamId: userMessageId,
- })
-
- if (previousChatHistory) {
- queryClient.setQueryData(taskKeys.detail(requestChatId), previousChatHistory)
- }
- setMessages(previousMessages)
- const queuedMessage: QueuedMessage = {
- id: generateId(),
- content: message,
- fileAttachments,
- contexts,
- }
- pendingRecoveryMessageRef.current = queuedMessage
- setPendingRecoveryMessage(queuedMessage)
-
- try {
- const pendingRecovery = await preparePendingStreamRecovery(requestChatId)
- if (!pendingRecovery) {
- setError(errorMessage)
- if (streamGenRef.current === gen) {
- finalize({ error: true })
- }
- return
- }
-
- streamIdRef.current = pendingRecovery.streamId
- lastEventIdRef.current =
- pendingRecovery.snapshot?.events?.[pendingRecovery.snapshot.events.length - 1]
- ?.eventId ?? 0
-
- const rehydratedMessages = messagesRef.current
- const lastAssistantMsg = [...rehydratedMessages]
- .reverse()
- .find((m) => m.role === 'assistant')
- const recoveryAssistantId = lastAssistantMsg?.id ?? assistantId
-
- await resumeOrFinalize({
- streamId: pendingRecovery.streamId,
- assistantId: recoveryAssistantId,
- gen,
- fromEventId: lastEventIdRef.current,
- snapshot: pendingRecovery.snapshot,
- })
- return
- } catch (recoveryError) {
- logger.warn('Failed to recover active stream after conflict', {
- chatId: requestChatId,
- error: recoveryError instanceof Error ? recoveryError.message : String(recoveryError),
- })
- }
- }
const activeStreamId = streamIdRef.current
if (activeStreamId && streamGenRef.current === gen) {
@@ -2096,26 +2620,22 @@ export function useChat(
if (succeeded) return
}
- setError(errorMessage)
+ setError(err instanceof Error ? err.message : 'Failed to send message')
if (streamGenRef.current === gen) {
finalize({ error: true })
}
return
}
},
- [
- workspaceId,
- queryClient,
- processSSEStream,
- finalize,
- resumeOrFinalize,
- retryReconnect,
- preparePendingStreamRecovery,
- ]
+ [workspaceId, queryClient, processSSEStream, finalize, resumeOrFinalize, retryReconnect]
)
sendMessageRef.current = sendMessage
const stopGeneration = useCallback(async () => {
+ if (pendingStopPromiseRef.current) {
+ return pendingStopPromiseRef.current
+ }
+
const wasSending = sendingRef.current
const sid =
streamIdRef.current ||
@@ -2123,17 +2643,14 @@ export function useChat(
?.activeStreamId ||
undefined
+ locallyTerminalStreamIdRef.current = sid
streamGenRef.current++
streamReaderRef.current?.cancel().catch(() => {})
streamReaderRef.current = null
- abortControllerRef.current?.abort()
+ abortControllerRef.current?.abort('user_stop:client_stopGeneration')
abortControllerRef.current = null
sendingRef.current = false
setIsSending(false)
- setIsReconnecting(false)
- lastEventIdRef.current = 0
- pendingRecoveryMessageRef.current = null
- setPendingRecoveryMessage(null)
setMessages((prev) =>
prev.map((msg) => {
@@ -2154,118 +2671,140 @@ export function useChat(
})
)
- if (sid) {
- fetch('/api/copilot/chat/abort', {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({ streamId: sid }),
- }).catch(() => {})
- }
+ const stopBarrier = (async () => {
+ try {
+ if (wasSending && !chatIdRef.current) {
+ const start = Date.now()
+ while (!chatIdRef.current && Date.now() - start < 3000) {
+ await new Promise((r) => setTimeout(r, 50))
+ }
+ }
- if (wasSending && !chatIdRef.current) {
- const start = Date.now()
- while (!chatIdRef.current && Date.now() - start < 3000) {
- await new Promise((r) => setTimeout(r, 50))
- }
- }
+ const resolvedChatId = chatIdRef.current
+ const abortPromise = sid
+ ? (async () => {
+ const res = await fetch('/api/copilot/chat/abort', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ streamId: sid,
+ ...(resolvedChatId ? { chatId: resolvedChatId } : {}),
+ }),
+ })
+ if (!res.ok) {
+ const payload = await res.json().catch(() => null)
+ throw new Error(
+ typeof payload?.error === 'string'
+ ? payload.error
+ : 'Failed to abort previous response'
+ )
+ }
+ })()
+ : Promise.resolve()
- if (wasSending && chatIdRef.current) {
- await persistPartialResponse()
- }
- invalidateChatQueries()
- setStreamingFile(null)
- streamingFileRef.current = null
- setResources((rs) => rs.filter((resource) => resource.id !== 'streaming-file'))
-
- const execState = useExecutionStore.getState()
- const consoleStore = useTerminalConsoleStore.getState()
- for (const [workflowId, wfExec] of execState.workflowExecutions) {
- if (!wfExec.isExecuting) continue
-
- const toolCallId = markRunToolManuallyStopped(workflowId)
- cancelRunToolExecution(workflowId)
-
- const executionId = execState.getCurrentExecutionId(workflowId)
- if (executionId) {
- execState.setCurrentExecutionId(workflowId, null)
- fetch(`/api/workflows/${workflowId}/executions/${executionId}/cancel`, {
- method: 'POST',
- }).catch(() => {})
- }
+ if (wasSending && resolvedChatId) {
+ await persistPartialResponse()
+ }
- consoleStore.cancelRunningEntries(workflowId)
- const now = new Date()
- consoleStore.addConsole({
- input: {},
- output: {},
- success: false,
- error: 'Execution was cancelled',
- durationMs: 0,
- startedAt: now.toISOString(),
- executionOrder: Number.MAX_SAFE_INTEGER,
- endedAt: now.toISOString(),
- workflowId,
- blockId: 'cancelled',
- executionId: executionId ?? undefined,
- blockName: 'Execution Cancelled',
- blockType: 'cancelled',
- })
+ await abortPromise
+ } finally {
+ invalidateChatQueries()
+ resetEphemeralPreviewState({ removeStreamingResource: true })
+
+ const execState = useExecutionStore.getState()
+ const consoleStore = useTerminalConsoleStore.getState()
+ for (const [workflowId, wfExec] of execState.workflowExecutions) {
+ if (!wfExec.isExecuting) continue
+
+ const toolCallId = markRunToolManuallyStopped(workflowId)
+ cancelRunToolExecution(workflowId)
+
+ const executionId = execState.getCurrentExecutionId(workflowId)
+ if (executionId) {
+ execState.setCurrentExecutionId(workflowId, null)
+ fetch(`/api/workflows/${workflowId}/executions/${executionId}/cancel`, {
+ method: 'POST',
+ }).catch(() => {})
+ }
+
+ consoleStore.cancelRunningEntries(workflowId)
+ const now = new Date()
+ consoleStore.addConsole({
+ input: {},
+ output: {},
+ success: false,
+ error: 'Execution was cancelled',
+ durationMs: 0,
+ startedAt: now.toISOString(),
+ executionOrder: Number.MAX_SAFE_INTEGER,
+ endedAt: now.toISOString(),
+ workflowId,
+ blockId: 'cancelled',
+ executionId: executionId ?? undefined,
+ blockName: 'Execution Cancelled',
+ blockType: 'cancelled',
+ })
- executionStream.cancel(workflowId)
- consolePersistence.executionEnded()
- execState.setIsExecuting(workflowId, false)
- execState.setIsDebugging(workflowId, false)
- execState.setActiveBlocks(workflowId, new Set())
+ executionStream.cancel(workflowId)
+ execState.setIsExecuting(workflowId, false)
+ execState.setIsDebugging(workflowId, false)
+ execState.setActiveBlocks(workflowId, new Set())
- reportManualRunToolStop(workflowId, toolCallId).catch(() => {})
+ reportManualRunToolStop(workflowId, toolCallId).catch(() => {})
+ }
+ }
+ })()
+
+ pendingStopPromiseRef.current = stopBarrier
+ try {
+ await stopBarrier
+ } catch (err) {
+ setError(err instanceof Error ? err.message : 'Failed to stop the previous response')
+ throw err
+ } finally {
+ if (pendingStopPromiseRef.current === stopBarrier) {
+ pendingStopPromiseRef.current = null
+ }
}
- }, [invalidateChatQueries, persistPartialResponse, executionStream])
+ }, [
+ invalidateChatQueries,
+ persistPartialResponse,
+ executionStream,
+ queryClient,
+ resetEphemeralPreviewState,
+ ])
const removeFromQueue = useCallback((id: string) => {
- if (pendingRecoveryMessageRef.current?.id === id) {
- pendingRecoveryMessageRef.current = null
- setPendingRecoveryMessage(null)
- return
- }
- messageQueueRef.current = messageQueueRef.current.filter((m) => m.id !== id)
setMessageQueue((prev) => prev.filter((m) => m.id !== id))
}, [])
const sendNow = useCallback(
async (id: string) => {
- const recoveryMessage = pendingRecoveryMessageRef.current
- const msg =
- recoveryMessage?.id === id
- ? recoveryMessage
- : messageQueueRef.current.find((m) => m.id === id)
+ if (manualQueueSendIdRef.current === id) return
+ const msg = messageQueueRef.current.find((m) => m.id === id)
if (!msg) return
- // Eagerly update ref so a rapid second click finds the message already gone
- if (recoveryMessage?.id === id) {
- pendingRecoveryMessageRef.current = null
- setPendingRecoveryMessage(null)
- } else {
- messageQueueRef.current = messageQueueRef.current.filter((m) => m.id !== id)
- }
- await stopGeneration()
- if (recoveryMessage?.id !== id) {
- setMessageQueue((prev) => prev.filter((m) => m.id !== id))
+ manualQueueSendIdRef.current = id
+ setMessageQueue((prev) => prev.filter((m) => m.id !== id))
+ try {
+ await stopGeneration()
+ await sendMessage(msg.content, msg.fileAttachments, msg.contexts)
+ } catch {
+ setMessageQueue((prev) => {
+ if (prev.some((m) => m.id === id)) return prev
+ return [msg, ...prev]
+ })
+ } finally {
+ if (manualQueueSendIdRef.current === id) {
+ manualQueueSendIdRef.current = null
+ }
}
- await sendMessage(msg.content, msg.fileAttachments, msg.contexts)
},
[stopGeneration, sendMessage]
)
const editQueuedMessage = useCallback((id: string): QueuedMessage | undefined => {
- const recoveryMessage = pendingRecoveryMessageRef.current
- if (recoveryMessage?.id === id) {
- pendingRecoveryMessageRef.current = null
- setPendingRecoveryMessage(null)
- return recoveryMessage
- }
-
const msg = messageQueueRef.current.find((m) => m.id === id)
if (!msg) return undefined
- messageQueueRef.current = messageQueueRef.current.filter((m) => m.id !== id)
setMessageQueue((prev) => prev.filter((m) => m.id !== id))
return msg
}, [])
@@ -2276,9 +2815,6 @@ export function useChat(
abortControllerRef.current = null
streamGenRef.current++
sendingRef.current = false
- lastEventIdRef.current = 0
- clientExecutionStartedRef.current.clear()
- pendingRecoveryMessageRef.current = null
}
}, [])
@@ -2296,11 +2832,11 @@ export function useChat(
addResource,
removeResource,
reorderResources,
- messageQueue: visibleMessageQueue,
+ messageQueue,
removeFromQueue,
sendNow,
editQueuedMessage,
- streamingFile,
+ previewSession,
genericResourceData,
}
}
diff --git a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-file-preview-sessions.test.tsx b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-file-preview-sessions.test.tsx
new file mode 100644
index 00000000000..863df51596d
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-file-preview-sessions.test.tsx
@@ -0,0 +1,182 @@
+/**
+ * @vitest-environment node
+ */
+import { describe, expect, it } from 'vitest'
+import type { FilePreviewSession } from '@/lib/copilot/request/session'
+import {
+ buildCompletedPreviewSessions,
+ INITIAL_FILE_PREVIEW_SESSIONS_STATE,
+ reduceFilePreviewSessions,
+} from '@/app/workspace/[workspaceId]/home/hooks/use-file-preview-sessions'
+
+function createSession(
+ overrides: Partial & Pick
+): FilePreviewSession {
+ return {
+ schemaVersion: 1,
+ id: overrides.id,
+ streamId: overrides.streamId ?? 'stream-1',
+ toolCallId: overrides.toolCallId,
+ status: overrides.status ?? 'streaming',
+ fileName: overrides.fileName ?? `${overrides.id}.md`,
+ previewText: overrides.previewText ?? '',
+ previewVersion: overrides.previewVersion ?? 1,
+ updatedAt: overrides.updatedAt ?? '2026-04-10T00:00:00.000Z',
+ ...(overrides.fileId ? { fileId: overrides.fileId } : {}),
+ ...(overrides.targetKind ? { targetKind: overrides.targetKind } : {}),
+ ...(overrides.operation ? { operation: overrides.operation } : {}),
+ ...(overrides.edit ? { edit: overrides.edit } : {}),
+ ...(overrides.completedAt ? { completedAt: overrides.completedAt } : {}),
+ }
+}
+
+describe('reduceFilePreviewSessions', () => {
+ it('builds complete sessions for terminal stream reconciliation', () => {
+ const completedAt = '2026-04-10T00:00:10.000Z'
+ const nextSessions = buildCompletedPreviewSessions(
+ {
+ 'preview-1': createSession({
+ id: 'preview-1',
+ toolCallId: 'preview-1',
+ status: 'pending',
+ previewText: 'draft',
+ }),
+ 'preview-2': createSession({
+ id: 'preview-2',
+ toolCallId: 'preview-2',
+ status: 'streaming',
+ previewText: 'partial',
+ }),
+ 'preview-3': createSession({
+ id: 'preview-3',
+ toolCallId: 'preview-3',
+ status: 'complete',
+ previewText: 'done',
+ completedAt: '2026-04-10T00:00:03.000Z',
+ }),
+ },
+ completedAt
+ )
+
+ expect(nextSessions).toHaveLength(2)
+ expect(nextSessions.map((session) => session.id)).toEqual(['preview-1', 'preview-2'])
+ expect(nextSessions.every((session) => session.status === 'complete')).toBe(true)
+ expect(nextSessions.every((session) => session.updatedAt === completedAt)).toBe(true)
+ expect(nextSessions.every((session) => session.completedAt === completedAt)).toBe(true)
+ })
+
+ it('hydrates the latest active preview session', () => {
+ const state = reduceFilePreviewSessions(INITIAL_FILE_PREVIEW_SESSIONS_STATE, {
+ type: 'hydrate',
+ sessions: [
+ createSession({
+ id: 'preview-1',
+ toolCallId: 'preview-1',
+ previewVersion: 1,
+ updatedAt: '2026-04-10T00:00:00.000Z',
+ }),
+ createSession({
+ id: 'preview-2',
+ toolCallId: 'preview-2',
+ previewVersion: 2,
+ updatedAt: '2026-04-10T00:00:01.000Z',
+ previewText: 'latest',
+ }),
+ ],
+ })
+
+ expect(state.activeSessionId).toBe('preview-2')
+ expect(state.sessions['preview-2']?.previewText).toBe('latest')
+ })
+
+ it('drops the active session when it completes and promotes the next active session', () => {
+ const hydratedState = reduceFilePreviewSessions(INITIAL_FILE_PREVIEW_SESSIONS_STATE, {
+ type: 'hydrate',
+ sessions: [
+ createSession({
+ id: 'preview-1',
+ toolCallId: 'preview-1',
+ previewVersion: 1,
+ updatedAt: '2026-04-10T00:00:00.000Z',
+ }),
+ createSession({
+ id: 'preview-2',
+ toolCallId: 'preview-2',
+ previewVersion: 2,
+ updatedAt: '2026-04-10T00:00:01.000Z',
+ }),
+ ],
+ })
+ const completedState = reduceFilePreviewSessions(hydratedState, {
+ type: 'complete',
+ session: createSession({
+ id: 'preview-2',
+ toolCallId: 'preview-2',
+ status: 'complete',
+ previewVersion: 3,
+ updatedAt: '2026-04-10T00:00:02.000Z',
+ completedAt: '2026-04-10T00:00:02.000Z',
+ }),
+ })
+
+ expect(completedState.activeSessionId).toBe('preview-1')
+ expect(completedState.sessions['preview-1']?.id).toBe('preview-1')
+ })
+
+ it('clears active session when the only session completes', () => {
+ const onlyStreaming = reduceFilePreviewSessions(INITIAL_FILE_PREVIEW_SESSIONS_STATE, {
+ type: 'upsert',
+ session: createSession({
+ id: 'preview-1',
+ toolCallId: 'preview-1',
+ previewVersion: 2,
+ updatedAt: '2026-04-10T00:00:01.000Z',
+ previewText: 'final',
+ }),
+ })
+
+ const completed = reduceFilePreviewSessions(onlyStreaming, {
+ type: 'complete',
+ session: createSession({
+ id: 'preview-1',
+ toolCallId: 'preview-1',
+ status: 'complete',
+ previewVersion: 3,
+ updatedAt: '2026-04-10T00:00:02.000Z',
+ completedAt: '2026-04-10T00:00:02.000Z',
+ previewText: 'final',
+ }),
+ })
+
+ expect(completed.activeSessionId).toBeNull()
+ expect(completed.sessions['preview-1']?.status).toBe('complete')
+ })
+
+ it('ignores stale complete events for a newer active session', () => {
+ const activeState = reduceFilePreviewSessions(INITIAL_FILE_PREVIEW_SESSIONS_STATE, {
+ type: 'upsert',
+ session: createSession({
+ id: 'preview-1',
+ toolCallId: 'preview-1',
+ previewVersion: 3,
+ updatedAt: '2026-04-10T00:00:03.000Z',
+ }),
+ })
+
+ const staleCompleteState = reduceFilePreviewSessions(activeState, {
+ type: 'complete',
+ session: createSession({
+ id: 'preview-1',
+ toolCallId: 'preview-1',
+ status: 'complete',
+ previewVersion: 2,
+ updatedAt: '2026-04-10T00:00:02.000Z',
+ completedAt: '2026-04-10T00:00:02.000Z',
+ }),
+ })
+
+ expect(staleCompleteState.activeSessionId).toBe('preview-1')
+ expect(staleCompleteState.sessions['preview-1']?.status).toBe('streaming')
+ expect(staleCompleteState.sessions['preview-1']?.previewVersion).toBe(3)
+ })
+})
diff --git a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-file-preview-sessions.ts b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-file-preview-sessions.ts
new file mode 100644
index 00000000000..6782585bbbb
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-file-preview-sessions.ts
@@ -0,0 +1,201 @@
+import { useCallback, useMemo, useReducer } from 'react'
+import type { FilePreviewSession } from '@/lib/copilot/request/session'
+
+export interface FilePreviewSessionsState {
+ activeSessionId: string | null
+ sessions: Record<string, FilePreviewSession>
+}
+
+export type FilePreviewSessionsAction =
+ | { type: 'hydrate'; sessions: FilePreviewSession[] }
+ | { type: 'upsert'; session: FilePreviewSession; activate?: boolean }
+ | { type: 'complete'; session: FilePreviewSession }
+ | { type: 'remove'; sessionId: string }
+ | { type: 'reset' }
+
+export const INITIAL_FILE_PREVIEW_SESSIONS_STATE: FilePreviewSessionsState = {
+ activeSessionId: null,
+ sessions: {},
+}
+
+export function shouldReplaceSession(
+ current: FilePreviewSession | undefined,
+ next: FilePreviewSession
+): boolean {
+ if (!current) return true
+ if (next.previewVersion !== current.previewVersion) {
+ return next.previewVersion > current.previewVersion
+ }
+ return next.updatedAt >= current.updatedAt
+}
+
+export function pickActiveSessionId(
+ sessions: Record<string, FilePreviewSession>,
+ preferredId?: string | null
+): string | null {
+ if (preferredId && sessions[preferredId]?.status !== 'complete') {
+ return preferredId
+ }
+
+ let latestActive: FilePreviewSession | null = null
+ for (const session of Object.values(sessions)) {
+ if (session.status === 'complete') continue
+ if (!latestActive || shouldReplaceSession(latestActive, session)) {
+ latestActive = session
+ }
+ }
+
+ return latestActive?.id ?? null
+}
+
+export function buildCompletedPreviewSessions(
+ sessions: Record<string, FilePreviewSession>,
+ completedAt: string
+): FilePreviewSession[] {
+ return Object.values(sessions)
+ .filter((session) => session.status !== 'complete')
+ .map((session) => ({
+ ...session,
+ status: 'complete' as const,
+ updatedAt: completedAt,
+ completedAt,
+ }))
+}
+
+export function reduceFilePreviewSessions(
+ state: FilePreviewSessionsState,
+ action: FilePreviewSessionsAction
+): FilePreviewSessionsState {
+ switch (action.type) {
+ case 'hydrate': {
+ if (action.sessions.length === 0) {
+ return state
+ }
+
+ const nextSessions = { ...state.sessions }
+ for (const session of action.sessions) {
+ if (shouldReplaceSession(nextSessions[session.id], session)) {
+ nextSessions[session.id] = session
+ }
+ }
+
+ return {
+ sessions: nextSessions,
+ activeSessionId: pickActiveSessionId(nextSessions, state.activeSessionId),
+ }
+ }
+
+ case 'upsert': {
+ if (!shouldReplaceSession(state.sessions[action.session.id], action.session)) {
+ return state
+ }
+
+ const nextSessions = {
+ ...state.sessions,
+ [action.session.id]: action.session,
+ }
+
+ return {
+ sessions: nextSessions,
+ activeSessionId:
+ action.activate === false
+ ? pickActiveSessionId(nextSessions, state.activeSessionId)
+ : action.session.status === 'complete'
+ ? pickActiveSessionId(nextSessions, state.activeSessionId)
+ : action.session.id,
+ }
+ }
+
+ case 'complete': {
+ if (!shouldReplaceSession(state.sessions[action.session.id], action.session)) {
+ return state
+ }
+
+ const nextSessions = {
+ ...state.sessions,
+ [action.session.id]: action.session,
+ }
+
+ return {
+ sessions: nextSessions,
+ activeSessionId:
+ state.activeSessionId === action.session.id
+ ? pickActiveSessionId(nextSessions, null)
+ : state.activeSessionId,
+ }
+ }
+
+ case 'remove': {
+ if (!state.sessions[action.sessionId]) {
+ return state
+ }
+
+ const nextSessions = { ...state.sessions }
+ delete nextSessions[action.sessionId]
+
+ return {
+ sessions: nextSessions,
+ activeSessionId:
+ state.activeSessionId === action.sessionId
+ ? pickActiveSessionId(nextSessions, null)
+ : state.activeSessionId,
+ }
+ }
+
+ case 'reset':
+ return INITIAL_FILE_PREVIEW_SESSIONS_STATE
+
+ default:
+ return state
+ }
+}
+
+export function useFilePreviewSessions() {
+ const [state, dispatch] = useReducer(
+ reduceFilePreviewSessions,
+ INITIAL_FILE_PREVIEW_SESSIONS_STATE
+ )
+
+ const previewSession = useMemo(
+ () => (state.activeSessionId ? (state.sessions[state.activeSessionId] ?? null) : null),
+ [state.activeSessionId, state.sessions]
+ )
+
+ const hydratePreviewSessions = useCallback((sessions: FilePreviewSession[]) => {
+ dispatch({ type: 'hydrate', sessions })
+ }, [])
+
+ const upsertPreviewSession = useCallback(
+ (session: FilePreviewSession, options?: { activate?: boolean }) => {
+ dispatch({
+ type: 'upsert',
+ session,
+ ...(options?.activate === false ? { activate: false } : {}),
+ })
+ },
+ []
+ )
+
+ const completePreviewSession = useCallback((session: FilePreviewSession) => {
+ dispatch({ type: 'complete', session })
+ }, [])
+
+ const removePreviewSession = useCallback((sessionId: string) => {
+ dispatch({ type: 'remove', sessionId })
+ }, [])
+
+ const resetPreviewSessions = useCallback(() => {
+ dispatch({ type: 'reset' })
+ }, [])
+
+ return {
+ previewSession,
+ previewSessionsById: state.sessions,
+ activePreviewSessionId: state.activeSessionId,
+ hydratePreviewSessions,
+ upsertPreviewSession,
+ completePreviewSession,
+ removePreviewSession,
+ resetPreviewSessions,
+ }
+}
diff --git a/apps/sim/app/workspace/[workspaceId]/home/types.ts b/apps/sim/app/workspace/[workspaceId]/home/types.ts
index e6ae3c9f0a9..5977f5e4720 100644
--- a/apps/sim/app/workspace/[workspaceId]/home/types.ts
+++ b/apps/sim/app/workspace/[workspaceId]/home/types.ts
@@ -1,10 +1,40 @@
-import type { MothershipResourceType } from '@/lib/copilot/resource-types'
+import {
+ Agent,
+ Auth,
+ CreateWorkflow,
+ Deploy,
+ EditWorkflow,
+ FunctionExecute,
+ GetPageContents,
+ Glob,
+ Grep,
+ Job,
+ Knowledge,
+ KnowledgeBase,
+ ManageMcpTool,
+ ManageSkill,
+ OpenResource,
+ Read as ReadTool,
+ Research,
+ ScrapePage,
+ SearchLibraryDocs,
+ SearchOnline,
+ Superagent,
+ Table,
+ UserMemory,
+ UserTable,
+ Workflow,
+ WorkspaceFile,
+} from '@/lib/copilot/generated/tool-catalog-v1'
import type { ChatContext } from '@/stores/panel'
+const EDIT_CONTENT_TOOL_ID = 'edit_content'
+const RUN_SUBAGENT_ID = 'run'
+
export type {
MothershipResource,
MothershipResourceType,
-} from '@/lib/copilot/resource-types'
+} from '@/lib/copilot/resources/types'
/** Union of all valid context kind strings, derived from {@link ChatContext}. */
export type ChatContextKind = ChatContext['kind']
@@ -24,169 +54,34 @@ export interface QueuedMessage {
contexts?: ChatContext[]
}
-/**
- * SSE event types emitted by the Go orchestrator backend.
- *
- * @example
- * ```json
- * { "type": "content", "data": "Hello world" }
- * { "type": "tool_call", "state": "executing", "toolCallId": "toolu_...", "toolName": "glob", "ui": { "title": "..." } }
- * { "type": "subagent_start", "subagent": "build" }
- * ```
- */
-export type SSEEventType =
- | 'chat_id'
- | 'request_id'
- | 'title_updated'
- | 'content'
- | 'reasoning' // openai reasoning - render as thinking text
- | 'tool_call' // tool call name
- | 'tool_call_delta' // chunk of tool call
- | 'tool_generating' // start a tool call
- | 'tool_result' // tool call result
- | 'tool_error' // tool call error
- | 'resource_added' // add a resource to the chat
- | 'resource_deleted' // delete a resource from the chat
- | 'subagent_start' // start a subagent
- | 'subagent_end' // end a subagent
- | 'structured_result' // structured result from a tool call
- | 'subagent_result' // result from a subagent
- | 'done' // end of the chat
- | 'context_compaction_start' // context compaction started
- | 'context_compaction' // conversation context was compacted
- | 'error' // error in the chat
- | 'start' // start of the chat
-
/**
* All tool names observed in the mothership SSE stream, grouped by phase.
*
* @example
* ```json
- * { "type": "tool_generating", "toolName": "glob" }
- * { "type": "tool_call", "toolName": "function_execute", "ui": { "title": "Running code", "icon": "code" } }
- * ```
- */
-export type MothershipToolName =
- | 'glob'
- | 'grep'
- | 'read'
- | 'search_online'
- | 'scrape_page'
- | 'get_page_contents'
- | 'search_library_docs'
- | 'manage_mcp_tool'
- | 'manage_skill'
- | 'manage_credential'
- | 'manage_custom_tool'
- | 'manage_job'
- | 'user_memory'
- | 'function_execute'
- | 'superagent'
- | 'user_table'
- | 'workspace_file'
- | 'create_workflow'
- | 'delete_workflow'
- | 'edit_workflow'
- | 'rename_workflow'
- | 'move_workflow'
- | 'run_workflow'
- | 'run_block'
- | 'run_from_block'
- | 'run_workflow_until_block'
- | 'create_folder'
- | 'delete_folder'
- | 'move_folder'
- | 'list_folders'
- | 'list_user_workspaces'
- | 'create_job'
- | 'complete_job'
- | 'update_job_history'
- | 'job_respond'
- | 'download_to_workspace_file'
- | 'materialize_file'
- | 'context_write'
- | 'generate_image'
- | 'generate_visualization'
- | 'crawl_website'
- | 'get_execution_summary'
- | 'get_job_logs'
- | 'get_deployment_version'
- | 'revert_to_version'
- | 'check_deployment_status'
- | 'get_deployed_workflow_state'
- | 'get_workflow_data'
- | 'get_workflow_logs'
- | 'get_block_outputs'
- | 'get_block_upstream_references'
- | 'set_global_workflow_variables'
- | 'set_environment_variables'
- | 'get_platform_actions'
- | 'search_documentation'
- | 'search_patterns'
- | 'update_workspace_mcp_server'
- | 'delete_workspace_mcp_server'
- | 'create_workspace_mcp_server'
- | 'list_workspace_mcp_servers'
- | 'deploy_api'
- | 'deploy_chat'
- | 'deploy_mcp'
- | 'redeploy'
- | 'generate_api_key'
- | 'oauth_get_auth_link'
- | 'oauth_request_access'
- | 'build'
- | 'run'
- | 'deploy'
- | 'auth'
- | 'knowledge'
- | 'knowledge_base'
- | 'table'
- | 'job'
- | 'agent'
- | 'custom_tool'
- | 'research'
- | 'plan'
- | 'debug'
- | 'edit'
- | 'fast_edit'
- | 'open_resource'
- | 'context_compaction'
-
-/**
- * Subagent identifiers dispatched via `subagent_start` SSE events.
- *
- * @example
- * ```json
- * { "type": "subagent_start", "subagent": "build" }
+ * { "type": "tool", "phase": "call", "toolName": "glob" }
+ * { "type": "tool", "phase": "call", "toolName": "function_execute", "ui": { "title": "Running code", "icon": "code" } }
* ```
+ * Stream `type` is `MothershipStreamV1EventType.tool` (`mothership-stream-v1`) with `phase: 'call'`.
*/
-export type SubagentName =
- | 'build'
- | 'deploy'
- | 'auth'
- | 'research'
- | 'knowledge'
- | 'table'
- | 'custom_tool'
- | 'superagent'
- | 'plan'
- | 'debug'
- | 'edit'
- | 'fast_edit'
- | 'run'
- | 'agent'
- | 'job'
- | 'file_write'
-export type ToolPhase =
- | 'workspace'
- | 'search'
- | 'management'
- | 'execution'
- | 'resource'
- | 'subagent'
+export const ToolPhase = {
+ workspace: 'workspace',
+ search: 'search',
+ management: 'management',
+ execution: 'execution',
+ resource: 'resource',
+ subagent: 'subagent',
+} as const
+export type ToolPhase = (typeof ToolPhase)[keyof typeof ToolPhase]
-export type ToolCallStatus = 'executing' | 'success' | 'error' | 'cancelled'
+export const ToolCallStatus = {
+ executing: 'executing',
+ success: 'success',
+ error: 'error',
+ cancelled: 'cancelled',
+} as const
+export type ToolCallStatus = (typeof ToolCallStatus)[keyof typeof ToolCallStatus]
export interface ToolCallResult {
success: boolean
@@ -194,7 +89,6 @@ export interface ToolCallResult {
error?: string
}
-/** A single tool call result entry in the generic Results resource tab. */
export interface GenericResourceEntry {
toolCallId: string
toolName: string
@@ -205,7 +99,6 @@ export interface GenericResourceEntry {
result?: ToolCallResult
}
-/** Accumulated feed of tool call results shown in the generic Results tab. */
export interface GenericResourceData {
entries: GenericResourceEntry[]
}
@@ -228,7 +121,7 @@ export interface ToolCallInfo {
phaseLabel?: string
params?: Record<string, unknown>
calledBy?: string
- result?: { success: boolean; output?: unknown; error?: string }
+ result?: ToolCallResult
streamingArgs?: string
}
@@ -237,14 +130,17 @@ export interface OptionItem {
label: string
}
-export type ContentBlockType =
- | 'text'
- | 'tool_call'
- | 'subagent'
- | 'subagent_end'
- | 'subagent_text'
- | 'options'
- | 'stopped'
+export const ContentBlockType = {
+ text: 'text',
+ tool_call: 'tool_call',
+ subagent: 'subagent',
+ subagent_end: 'subagent_end',
+ subagent_text: 'subagent_text',
+ subagent_thinking: 'subagent_thinking',
+ options: 'options',
+ stopped: 'stopped',
+} as const
+export type ContentBlockType = (typeof ContentBlockType)[keyof typeof ContentBlockType]
export interface ContentBlock {
type: ContentBlockType
@@ -283,23 +179,19 @@ export interface ChatMessage {
requestId?: string
}
-export const SUBAGENT_LABELS: Record<SubagentName, string> = {
- build: 'Build agent',
- deploy: 'Deploy agent',
- auth: 'Integration agent',
- research: 'Research agent',
- knowledge: 'Knowledge agent',
- table: 'Table agent',
- custom_tool: 'Custom Tool agent',
+export const SUBAGENT_LABELS: Record<string, string> = {
+ workflow: 'Workflow Agent',
+ deploy: 'Deploy Agent',
+ auth: 'Auth Agent',
+ research: 'Research Agent',
+ knowledge: 'Knowledge Agent',
+ table: 'Table Agent',
+ custom_tool: 'Custom Tool Agent',
superagent: 'Superagent',
- plan: 'Plan agent',
- debug: 'Debug agent',
- edit: 'Edit agent',
- fast_edit: 'Build agent',
- run: 'Run agent',
- agent: 'Agent manager',
- job: 'Job agent',
- file_write: 'File Write',
+ run: 'Run Agent',
+ agent: 'Tools Agent',
+ job: 'Job Agent',
+ file: 'File Agent',
} as const
export interface ToolUIMetadata {
@@ -309,206 +201,127 @@ export interface ToolUIMetadata {
}
/**
- * Primary UI metadata for tools observed in the SSE stream.
- * Maps tool IDs to human-readable display names shown in the chat.
- * This is the single source of truth — server-sent `ui.title` values are not used.
+ * Default UI metadata for tools observed in the SSE stream.
+ * The backend may send `ui` on some `MothershipStreamV1EventType.tool` payloads (`phase: 'call'`);
+ * this map provides fallback metadata when `ui` is absent.
*/
-export const TOOL_UI_METADATA: Record<MothershipToolName, ToolUIMetadata> = {
- // Workspace
- glob: { title: 'Searching workspace', phaseLabel: 'Workspace', phase: 'workspace' },
- grep: { title: 'Searching workspace', phaseLabel: 'Workspace', phase: 'workspace' },
- read: { title: 'Reading file', phaseLabel: 'Workspace', phase: 'workspace' },
- // Search
- search_online: { title: 'Searching online', phaseLabel: 'Search', phase: 'search' },
- scrape_page: { title: 'Reading webpage', phaseLabel: 'Search', phase: 'search' },
- get_page_contents: { title: 'Reading page', phaseLabel: 'Search', phase: 'search' },
- search_library_docs: { title: 'Searching docs', phaseLabel: 'Search', phase: 'search' },
- crawl_website: { title: 'Browsing website', phaseLabel: 'Search', phase: 'search' },
- // Execution
- function_execute: { title: 'Running code', phaseLabel: 'Code', phase: 'execution' },
- superagent: { title: 'Taking action', phaseLabel: 'Action', phase: 'execution' },
- run_workflow: { title: 'Running workflow', phaseLabel: 'Execution', phase: 'execution' },
- run_block: { title: 'Running block', phaseLabel: 'Execution', phase: 'execution' },
- run_from_block: { title: 'Running from block', phaseLabel: 'Execution', phase: 'execution' },
- run_workflow_until_block: {
- title: 'Running partial workflow',
- phaseLabel: 'Execution',
- phase: 'execution',
+export const TOOL_UI_METADATA: Record<string, ToolUIMetadata> = {
+ [Glob.id]: {
+ title: 'Finding files',
+ phaseLabel: 'Workspace',
+ phase: 'workspace',
},
- complete_job: { title: 'Completing job', phaseLabel: 'Execution', phase: 'execution' },
- get_execution_summary: { title: 'Checking results', phaseLabel: 'Execution', phase: 'execution' },
- get_job_logs: { title: 'Checking logs', phaseLabel: 'Execution', phase: 'execution' },
- get_workflow_logs: { title: 'Checking logs', phaseLabel: 'Execution', phase: 'execution' },
- get_workflow_data: { title: 'Loading workflow', phaseLabel: 'Execution', phase: 'execution' },
- get_block_outputs: {
- title: 'Checking block outputs',
- phaseLabel: 'Execution',
- phase: 'execution',
+ [Grep.id]: {
+ title: 'Searching',
+ phaseLabel: 'Workspace',
+ phase: 'workspace',
},
- get_block_upstream_references: {
- title: 'Checking references',
- phaseLabel: 'Execution',
- phase: 'execution',
+ [ReadTool.id]: { title: 'Reading file', phaseLabel: 'Workspace', phase: 'workspace' },
+ [SearchOnline.id]: {
+ title: 'Searching online',
+ phaseLabel: 'Search',
+ phase: 'search',
+ },
+ [ScrapePage.id]: {
+ title: 'Scraping page',
+ phaseLabel: 'Search',
+ phase: 'search',
+ },
+ [GetPageContents.id]: {
+ title: 'Getting page contents',
+ phaseLabel: 'Search',
+ phase: 'search',
+ },
+ [SearchLibraryDocs.id]: {
+ title: 'Searching library docs',
+ phaseLabel: 'Search',
+ phase: 'search',
+ },
+ [ManageMcpTool.id]: {
+ title: 'MCP server action',
+ phaseLabel: 'Management',
+ phase: 'management',
},
- get_deployed_workflow_state: {
- title: 'Checking deployment',
- phaseLabel: 'Execution',
+ [ManageSkill.id]: {
+ title: 'Skill action',
+ phaseLabel: 'Management',
+ phase: 'management',
+ },
+ [UserMemory.id]: {
+ title: 'Accessing memory',
+ phaseLabel: 'Management',
+ phase: 'management',
+ },
+ [FunctionExecute.id]: {
+ title: 'Running code',
+ phaseLabel: 'Code',
phase: 'execution',
},
- check_deployment_status: {
- title: 'Checking deployment',
- phaseLabel: 'Execution',
+ [Superagent.id]: {
+ title: 'Executing action',
+ phaseLabel: 'Action',
phase: 'execution',
},
- // Workflows & folders
- create_workflow: { title: 'Creating workflow', phaseLabel: 'Resource', phase: 'resource' },
- delete_workflow: { title: 'Deleting workflow', phaseLabel: 'Resource', phase: 'resource' },
- edit_workflow: { title: 'Editing workflow', phaseLabel: 'Resource', phase: 'resource' },
- rename_workflow: { title: 'Renaming workflow', phaseLabel: 'Resource', phase: 'resource' },
- move_workflow: { title: 'Moving workflow', phaseLabel: 'Resource', phase: 'resource' },
- create_folder: { title: 'Creating folder', phaseLabel: 'Resource', phase: 'resource' },
- delete_folder: { title: 'Deleting folder', phaseLabel: 'Resource', phase: 'resource' },
- move_folder: { title: 'Moving folder', phaseLabel: 'Resource', phase: 'resource' },
- list_folders: { title: 'Browsing folders', phaseLabel: 'Resource', phase: 'resource' },
- list_user_workspaces: { title: 'Browsing workspaces', phaseLabel: 'Resource', phase: 'resource' },
- revert_to_version: { title: 'Restoring version', phaseLabel: 'Resource', phase: 'resource' },
- get_deployment_version: {
- title: 'Checking deployment',
+ [UserTable.id]: {
+ title: 'Managing table',
phaseLabel: 'Resource',
phase: 'resource',
},
- open_resource: { title: 'Opening resource', phaseLabel: 'Resource', phase: 'resource' },
- // Files
- workspace_file: { title: 'Working with files', phaseLabel: 'Resource', phase: 'resource' },
- download_to_workspace_file: {
- title: 'Downloading file',
+ [WorkspaceFile.id]: {
+ title: 'Editing file',
phaseLabel: 'Resource',
phase: 'resource',
},
- materialize_file: { title: 'Saving file', phaseLabel: 'Resource', phase: 'resource' },
- generate_image: { title: 'Generating image', phaseLabel: 'Resource', phase: 'resource' },
- generate_visualization: {
- title: 'Generating visualization',
+ [EDIT_CONTENT_TOOL_ID]: {
+ title: 'Applying file content',
phaseLabel: 'Resource',
phase: 'resource',
},
- // Tables & knowledge
- user_table: { title: 'Editing table', phaseLabel: 'Resource', phase: 'resource' },
- knowledge_base: { title: 'Updating knowledge base', phaseLabel: 'Resource', phase: 'resource' },
- // Jobs
- create_job: { title: 'Creating job', phaseLabel: 'Resource', phase: 'resource' },
- manage_job: { title: 'Updating job', phaseLabel: 'Management', phase: 'management' },
- update_job_history: { title: 'Updating job', phaseLabel: 'Management', phase: 'management' },
- job_respond: { title: 'Explaining job scheduled', phaseLabel: 'Execution', phase: 'execution' },
- // Management
- manage_mcp_tool: { title: 'Updating integration', phaseLabel: 'Management', phase: 'management' },
- manage_skill: { title: 'Updating skill', phaseLabel: 'Management', phase: 'management' },
- manage_credential: { title: 'Connecting account', phaseLabel: 'Management', phase: 'management' },
- manage_custom_tool: { title: 'Updating tool', phaseLabel: 'Management', phase: 'management' },
- update_workspace_mcp_server: {
- title: 'Updating MCP server',
- phaseLabel: 'Management',
- phase: 'management',
- },
- delete_workspace_mcp_server: {
- title: 'Removing MCP server',
- phaseLabel: 'Management',
- phase: 'management',
+ [CreateWorkflow.id]: {
+ title: 'Creating workflow',
+ phaseLabel: 'Resource',
+ phase: 'resource',
},
- create_workspace_mcp_server: {
- title: 'Creating MCP server',
- phaseLabel: 'Management',
- phase: 'management',
+ [EditWorkflow.id]: {
+ title: 'Editing workflow',
+ phaseLabel: 'Resource',
+ phase: 'resource',
},
- list_workspace_mcp_servers: {
- title: 'Browsing MCP servers',
- phaseLabel: 'Management',
- phase: 'management',
+ [Workflow.id]: { title: 'Workflow Agent', phaseLabel: 'Workflow', phase: 'subagent' },
+ [RUN_SUBAGENT_ID]: { title: 'Run Agent', phaseLabel: 'Run', phase: 'subagent' },
+ [Deploy.id]: { title: 'Deploy Agent', phaseLabel: 'Deploy', phase: 'subagent' },
+ [Auth.id]: {
+ title: 'Auth Agent',
+ phaseLabel: 'Auth',
+ phase: 'subagent',
},
- oauth_get_auth_link: {
- title: 'Connecting account',
- phaseLabel: 'Management',
- phase: 'management',
+ [Knowledge.id]: {
+ title: 'Knowledge Agent',
+ phaseLabel: 'Knowledge',
+ phase: 'subagent',
},
- oauth_request_access: {
- title: 'Connecting account',
- phaseLabel: 'Management',
- phase: 'management',
+ [KnowledgeBase.id]: {
+ title: 'Managing knowledge base',
+ phaseLabel: 'Resource',
+ phase: 'resource',
},
- set_environment_variables: {
- title: 'Updating environment',
- phaseLabel: 'Management',
- phase: 'management',
+ [Table.id]: { title: 'Table Agent', phaseLabel: 'Table', phase: 'subagent' },
+ [Job.id]: { title: 'Job Agent', phaseLabel: 'Job', phase: 'subagent' },
+ [Agent.id]: { title: 'Tools Agent', phaseLabel: 'Agent', phase: 'subagent' },
+ custom_tool: {
+ title: 'Creating tool',
+ phaseLabel: 'Tool',
+ phase: 'subagent',
},
- set_global_workflow_variables: {
- title: 'Updating variables',
- phaseLabel: 'Management',
- phase: 'management',
+ [Research.id]: { title: 'Research Agent', phaseLabel: 'Research', phase: 'subagent' },
+ [OpenResource.id]: {
+ title: 'Opening resource',
+ phaseLabel: 'Resource',
+ phase: 'resource',
},
- get_platform_actions: { title: 'Loading actions', phaseLabel: 'Management', phase: 'management' },
- search_documentation: { title: 'Searching docs', phaseLabel: 'Search', phase: 'search' },
- search_patterns: { title: 'Searching patterns', phaseLabel: 'Search', phase: 'search' },
- deploy_api: { title: 'Deploying API', phaseLabel: 'Deploy', phase: 'management' },
- deploy_chat: { title: 'Deploying chat', phaseLabel: 'Deploy', phase: 'management' },
- deploy_mcp: { title: 'Deploying MCP', phaseLabel: 'Deploy', phase: 'management' },
- redeploy: { title: 'Redeploying', phaseLabel: 'Deploy', phase: 'management' },
- generate_api_key: { title: 'Generating API key', phaseLabel: 'Deploy', phase: 'management' },
- user_memory: { title: 'Updating memory', phaseLabel: 'Management', phase: 'management' },
- context_write: { title: 'Writing notes', phaseLabel: 'Management', phase: 'management' },
context_compaction: {
- title: 'Optimizing context',
- phaseLabel: 'Management',
+ title: 'Compacted context',
+ phaseLabel: 'Context',
phase: 'management',
},
- // Subagents
- build: { title: 'Building', phaseLabel: 'Build', phase: 'subagent' },
- run: { title: 'Running', phaseLabel: 'Run', phase: 'subagent' },
- deploy: { title: 'Deploying', phaseLabel: 'Deploy', phase: 'subagent' },
- auth: { title: 'Connecting integration', phaseLabel: 'Auth', phase: 'subagent' },
- knowledge: { title: 'Working with knowledge', phaseLabel: 'Knowledge', phase: 'subagent' },
- table: { title: 'Working with tables', phaseLabel: 'Table', phase: 'subagent' },
- job: { title: 'Working with jobs', phaseLabel: 'Job', phase: 'subagent' },
- agent: { title: 'Taking action', phaseLabel: 'Agent', phase: 'subagent' },
- custom_tool: { title: 'Creating tool', phaseLabel: 'Tool', phase: 'subagent' },
- research: { title: 'Researching', phaseLabel: 'Research', phase: 'subagent' },
- plan: { title: 'Planning', phaseLabel: 'Plan', phase: 'subagent' },
- debug: { title: 'Debugging', phaseLabel: 'Debug', phase: 'subagent' },
- edit: { title: 'Editing workflow', phaseLabel: 'Edit', phase: 'subagent' },
- fast_edit: { title: 'Editing workflow', phaseLabel: 'Edit', phase: 'subagent' },
-}
-
-export interface SSEPayloadUI {
- hidden?: boolean
- title?: string
- phaseLabel?: string
- icon?: string
- internal?: boolean
- clientExecutable?: boolean
-}
-
-export interface SSEPayloadData {
- name?: string
- ui?: SSEPayloadUI
- id?: string
- agent?: string
- partial?: boolean
- arguments?: Record<string, unknown>
- input?: Record<string, unknown>
- result?: unknown
- error?: string
-}
-
-export interface SSEPayload {
- type: SSEEventType | (string & {})
- chatId?: string
- data?: string | SSEPayloadData
- content?: string
- toolCallId?: string
- toolName?: string
- ui?: SSEPayloadUI
- success?: boolean
- result?: unknown
- error?: string
- subagent?: string
- resource?: { type: MothershipResourceType; id: string; title: string }
}
diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/index.ts b/apps/sim/app/workspace/[workspaceId]/logs/components/index.ts
index 1a907cfd896..3c0c5922adf 100644
--- a/apps/sim/app/workspace/[workspaceId]/logs/components/index.ts
+++ b/apps/sim/app/workspace/[workspaceId]/logs/components/index.ts
@@ -1,5 +1,5 @@
export { Dashboard } from './dashboard'
-export { LogDetails } from './log-details'
+export { LogDetails, WorkflowOutputSection } from './log-details'
export { ExecutionSnapshot } from './log-details/components/execution-snapshot'
export { FileCards } from './log-details/components/file-download'
export { TraceSpans } from './log-details/components/trace-spans'
diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/index.ts b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/index.ts
index 5d9685fdbd3..47078f505c1 100644
--- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/index.ts
+++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/index.ts
@@ -1 +1 @@
-export { LogDetails } from './log-details'
+export { LogDetails, WorkflowOutputSection } from './log-details'
diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx
index 963aba17143..3c8f4e499d1 100644
--- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx
@@ -45,7 +45,7 @@ import { MAX_LOG_DETAILS_WIDTH_RATIO, MIN_LOG_DETAILS_WIDTH } from '@/stores/log
/**
* Workflow Output section with code viewer, copy, search, and context menu functionality
*/
-const WorkflowOutputSection = memo(
+export const WorkflowOutputSection = memo(
function WorkflowOutputSection({ output }: { output: Record<string, unknown> }) {
const contentRef = useRef(null)
const [copied, setCopied] = useState(false)
diff --git a/apps/sim/app/workspace/[workspaceId]/settings/[section]/settings.tsx b/apps/sim/app/workspace/[workspaceId]/settings/[section]/settings.tsx
index 4642fc9e843..2310ba2652e 100644
--- a/apps/sim/app/workspace/[workspaceId]/settings/[section]/settings.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/settings/[section]/settings.tsx
@@ -142,6 +142,13 @@ const Admin = dynamic(
import('@/app/workspace/[workspaceId]/settings/components/admin/admin').then((m) => m.Admin),
{ loading: () => }
)
+const Mothership = dynamic(
+ () =>
+ import('@/app/workspace/[workspaceId]/settings/components/mothership/mothership').then(
+ (m) => m.Mothership
+ ),
+ { loading: () => }
+)
const RecentlyDeleted = dynamic(
() =>
import(
@@ -182,7 +189,9 @@ export function SettingsPage({ section }: SettingsPageProps) {
? 'general'
: section === 'admin' && !sessionLoading && !isAdminRole
? 'general'
- : section
+ : section === 'mothership' && !sessionLoading && !isAdminRole
+ ? 'general'
+ : section
const label =
allNavigationItems.find((item) => item.id === effectiveSection)?.label ?? effectiveSection
@@ -215,6 +224,7 @@ export function SettingsPage({ section }: SettingsPageProps) {
{effectiveSection === 'inbox' && }
{effectiveSection === 'recently-deleted' && }
{effectiveSection === 'admin' && }
+ {effectiveSection === 'mothership' && }
)
}
diff --git a/apps/sim/app/workspace/[workspaceId]/settings/components/mothership/mothership.tsx b/apps/sim/app/workspace/[workspaceId]/settings/components/mothership/mothership.tsx
new file mode 100644
index 00000000000..fac175177fa
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/settings/components/mothership/mothership.tsx
@@ -0,0 +1,908 @@
+'use client'
+
+import { useCallback, useMemo, useState } from 'react'
+import { Badge, Button, Input as EmcnInput, Label, Skeleton } from '@/components/emcn'
+import { cn } from '@/lib/core/utils/cn'
+import {
+ type MothershipEnv,
+ useGenerateLicense,
+ useMothershipEnterpriseStats,
+ useMothershipLicenses,
+ useMothershipRequests,
+ useMothershipTrace,
+ useMothershipUserBreakdown,
+} from '@/hooks/queries/mothership-admin'
+
+type Tab = 'overview' | 'licenses' | 'enterprise' | 'traces'
+
+const TABS: { id: Tab; label: string }[] = [
+ { id: 'overview', label: 'Overview' },
+ { id: 'licenses', label: 'Licenses' },
+ { id: 'enterprise', label: 'Enterprise' },
+ { id: 'traces', label: 'Traces' },
+]
+
+const ENV_OPTIONS: { id: MothershipEnv; label: string }[] = [
+ { id: 'dev', label: 'Dev' },
+ { id: 'staging', label: 'Staging' },
+ { id: 'prod', label: 'Prod' },
+]
+
+function defaultTimeRange() {
+ const end = new Date()
+ const start = new Date()
+ start.setDate(start.getDate() - 7)
+ return {
+ start: start.toISOString().slice(0, 16),
+ end: end.toISOString().slice(0, 16),
+ }
+}
+
+function toRFC3339(local: string) {
+ if (!local) return ''
+ return new Date(local).toISOString()
+}
+
+function formatCost(cost: number) {
+ return `$${cost.toFixed(4)}`
+}
+
+function formatDate(d: string | null | undefined) {
+ if (!d) return '—'
+ return new Date(d).toLocaleString()
+}
+
+function Divider() {
+ return