From 325a666a8b0e6f4d3cf4b42cebfbb86dc8544846 Mon Sep 17 00:00:00 2001 From: Waleed Date: Wed, 17 Sep 2025 12:28:22 -0700 Subject: [PATCH 1/6] improvement(landing): insert prompt into copilot panel from landing, open panel on entry (#1363) * update infra and remove railway * improvement(landing): insert prompt into copilot panel from landing, open panel on entry * Revert "update infra and remove railway" This reverts commit abfa2f8d51901247acc6397960210569e84d72b1. * fixes * remove debug logs * go back to old env --- .../app/(landing)/components/hero/hero.tsx | 2 + .../copilot/components/welcome/welcome.tsx | 3 +- .../panel/components/copilot/copilot.tsx | 14 +- .../w/[workflowId]/components/panel/panel.tsx | 37 +++- apps/sim/lib/browser-storage.ts | 189 ++++++++++++++++++ apps/sim/stores/panel/store.ts | 4 + apps/sim/stores/panel/types.ts | 1 + 7 files changed, 241 insertions(+), 9 deletions(-) create mode 100644 apps/sim/lib/browser-storage.ts diff --git a/apps/sim/app/(landing)/components/hero/hero.tsx b/apps/sim/app/(landing)/components/hero/hero.tsx index 154edbe7fa..7169925df1 100644 --- a/apps/sim/app/(landing)/components/hero/hero.tsx +++ b/apps/sim/app/(landing)/components/hero/hero.tsx @@ -32,6 +32,7 @@ import { StripeIcon, SupabaseIcon, } from '@/components/icons' +import { LandingPromptStorage } from '@/lib/browser-storage' import { soehne } from '@/app/fonts/soehne/soehne' import { CARD_WIDTH, @@ -271,6 +272,7 @@ export default function Hero() { */ const handleSubmit = () => { if (!isEmpty) { + LandingPromptStorage.store(textValue) router.push('/signup') } } diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/welcome/welcome.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/welcome/welcome.tsx index 79415b13ad..84aafbbd03 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/welcome/welcome.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/welcome/welcome.tsx @@ -1,6 +1,6 @@ 'use client' -import { Blocks, Bot, LibraryBig, Workflow } from 'lucide-react' +import { Blocks, LibraryBig, Workflow } from 'lucide-react' interface CopilotWelcomeProps { onQuestionClick?: (question: string) => void @@ -59,7 +59,6 @@ export function CopilotWelcome({ onQuestionClick, mode = 'ask' }: CopilotWelcome
{/* Header */}
-

{subtitle}

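Patch 1/6 also creates `apps/sim/lib/browser-storage.ts` (189 lines, not shown in this diff). Its behavior is only implied by the call sites — `LandingPromptStorage.store(textValue)` in `hero.tsx` before redirecting to `/signup`, and `LandingPromptStorage.consume()` in `panel.tsx` on mount. A minimal sketch of such a helper, assuming a localStorage-backed one-shot value with an expiry; the storage key and TTL below are illustrative assumptions, not the actual implementation:

```ts
// Hypothetical sketch only - the real browser-storage.ts is not shown in this diff.
const KEY = 'sim:landing-prompt' // assumed key name
const TTL_MS = 10 * 60 * 1000 // assumed expiry window

export const LandingPromptStorage = {
  /** Persist the landing-page prompt before redirecting to signup. */
  store(prompt: string): void {
    if (typeof window === 'undefined') return
    try {
      window.localStorage.setItem(KEY, JSON.stringify({ prompt, storedAt: Date.now() }))
    } catch {
      // Quota or privacy-mode errors are non-fatal; the prompt is simply lost.
    }
  },

  /** Read the prompt once and clear it, returning null if missing or expired. */
  consume(): string | null {
    if (typeof window === 'undefined') return null
    try {
      const raw = window.localStorage.getItem(KEY)
      if (!raw) return null
      window.localStorage.removeItem(KEY) // one-shot semantics
      const { prompt, storedAt } = JSON.parse(raw) as { prompt: string; storedAt: number }
      return Date.now() - storedAt > TTL_MS ? null : prompt
    } catch {
      return null
    }
  },
}
```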
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/copilot.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/copilot.tsx index 8bfa7a0e6e..1665032e7a 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/copilot.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/copilot.tsx @@ -30,6 +30,7 @@ interface CopilotProps { interface CopilotRef { createNewChat: () => void + setInputValueAndFocus: (value: string) => void } export const Copilot = forwardRef(({ panelWidth }, ref) => { @@ -326,13 +327,24 @@ export const Copilot = forwardRef(({ panelWidth }, ref }, 100) // Small delay to ensure DOM updates are complete }, [createNewChat]) + const handleSetInputValueAndFocus = useCallback( + (value: string) => { + setInputValue(value) + setTimeout(() => { + userInputRef.current?.focus() + }, 150) + }, + [setInputValue] + ) + // Expose functions to parent useImperativeHandle( ref, () => ({ createNewChat: handleStartNewChat, + setInputValueAndFocus: handleSetInputValueAndFocus, }), - [handleStartNewChat] + [handleStartNewChat, handleSetInputValueAndFocus] ) // Handle abort action diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx index 6c26be1b9b..e0d2a7a51c 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx @@ -9,6 +9,7 @@ import { } from '@/components/ui/dropdown-menu' import { ScrollArea } from '@/components/ui/scroll-area' import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip' +import { LandingPromptStorage } from '@/lib/browser-storage' import { createLogger } from '@/lib/logs/console/logger' import { useCopilotStore } from '@/stores/copilot/store' import { useChatStore } from '@/stores/panel/chat/store' @@ -31,6 +32,7 @@ export function Panel() { const [resizeStartWidth, setResizeStartWidth] = useState(0) const copilotRef = useRef<{ createNewChat: () => void + setInputValueAndFocus: (value: string) => void }>(null) const lastLoadedWorkflowRef = useRef(null) @@ -289,17 +291,40 @@ export function Panel() { } }, [activeWorkflowId, copilotWorkflowId, ensureCopilotDataLoaded]) + useEffect(() => { + const storedPrompt = LandingPromptStorage.consume() + + if (storedPrompt && storedPrompt.trim().length > 0) { + setActiveTab('copilot') + if (!isOpen) { + togglePanel() + } + + setTimeout(() => { + if (copilotRef.current) { + copilotRef.current.setInputValueAndFocus(storedPrompt) + } else { + setTimeout(() => { + if (copilotRef.current) { + copilotRef.current.setInputValueAndFocus(storedPrompt) + } + }, 500) + } + }, 200) + } + }, []) // eslint-disable-line react-hooks/exhaustive-deps -- Run only on mount + return ( <> {/* Tab Selector - Always visible */}
)} - {/*
+
-
*/} +
)} ) } -// Fallback component while the verification form is loading function VerificationFormFallback() { return (
@@ -258,7 +245,7 @@ function VerificationFormFallback() { ) } -export function VerifyContent({ hasResendKey, baseUrl, isProduction }: VerifyContentProps) { +export function VerifyContent({ hasResendKey, isProduction }: VerifyContentProps) { return ( }> diff --git a/apps/sim/lib/auth.ts b/apps/sim/lib/auth.ts index 6140a50aa5..8ffdd3de48 100644 --- a/apps/sim/lib/auth.ts +++ b/apps/sim/lib/auth.ts @@ -147,7 +147,7 @@ export const auth = betterAuth({ }, emailAndPassword: { enabled: true, - requireEmailVerification: false, + requireEmailVerification: isProd, sendVerificationOnSignUp: false, throwOnMissingCredentials: true, throwOnInvalidCredentials: true, @@ -174,7 +174,6 @@ export const auth = betterAuth({ if (ctx.path.startsWith('/sign-up') && isTruthy(env.DISABLE_REGISTRATION)) throw new Error('Registration is disabled, please contact your admin.') - // Check email and domain whitelist for sign-in and sign-up if ( (ctx.path.startsWith('/sign-in') || ctx.path.startsWith('/sign-up')) && (env.ALLOWED_LOGIN_EMAILS || env.ALLOWED_LOGIN_DOMAINS) @@ -184,7 +183,6 @@ export const auth = betterAuth({ if (requestEmail) { let isAllowed = false - // Check specific email whitelist if (env.ALLOWED_LOGIN_EMAILS) { const allowedEmails = env.ALLOWED_LOGIN_EMAILS.split(',').map((email) => email.trim().toLowerCase() @@ -192,7 +190,6 @@ export const auth = betterAuth({ isAllowed = allowedEmails.includes(requestEmail) } - // Check domain whitelist if not already allowed if (!isAllowed && env.ALLOWED_LOGIN_DOMAINS) { const allowedDomains = env.ALLOWED_LOGIN_DOMAINS.split(',').map((domain) => domain.trim().toLowerCase() @@ -234,7 +231,6 @@ export const auth = betterAuth({ throw new Error('Email is required') } - // Validate email before sending OTP const validation = quickValidateEmail(data.email) if (!validation.isValid) { logger.warn('Email validation failed', { @@ -250,7 +246,6 @@ export const auth = betterAuth({ const html = await renderOTPEmail(data.otp, data.email, data.type) - // Send email via consolidated mailer (supports Resend, Azure, or logging fallback) const result = await sendEmail({ to: data.email, subject: getEmailSubject(data.type), @@ -259,7 +254,6 @@ export const auth = betterAuth({ emailType: 'transactional', }) - // If no email service is configured, log verification code for development if (!result.success && result.message.includes('no email service configured')) { logger.info('🔑 VERIFICATION CODE FOR LOGIN/SIGNUP', { email: data.email, @@ -300,7 +294,6 @@ export const auth = betterAuth({ redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/github-repo`, getUserInfo: async (tokens) => { try { - // Fetch user profile const profileResponse = await fetch('https://api.github.com/user', { headers: { Authorization: `Bearer ${tokens.accessToken}`, @@ -318,7 +311,6 @@ export const auth = betterAuth({ const profile = await profileResponse.json() - // If email is null, fetch emails separately if (!profile.email) { const emailsResponse = await fetch('https://api.github.com/user/emails', { headers: { @@ -330,7 +322,6 @@ export const auth = betterAuth({ if (emailsResponse.ok) { const emails = await emailsResponse.json() - // Find primary email or use the first one const primaryEmail = emails.find( (email: { primary: boolean; email: string; verified: boolean }) => @@ -366,7 +357,7 @@ export const auth = betterAuth({ }, }, - // Google providers for different purposes + // Google providers { providerId: 'google-email', clientId: env.GOOGLE_CLIENT_ID as string, @@ -378,7 +369,6 @@ export 
const auth = betterAuth({ 'https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/gmail.send', 'https://www.googleapis.com/auth/gmail.modify', - // 'https://www.googleapis.com/auth/gmail.readonly', 'https://www.googleapis.com/auth/gmail.labels', ], prompt: 'consent', @@ -598,11 +588,9 @@ export const auth = betterAuth({ try { logger.info('Creating Wealthbox user profile from token data') - // Generate a unique identifier since we can't fetch user info const uniqueId = `wealthbox-${Date.now()}` const now = new Date() - // Create a synthetic user profile return { id: uniqueId, name: 'Wealthbox User', @@ -625,8 +613,6 @@ export const auth = betterAuth({ clientSecret: env.SUPABASE_CLIENT_SECRET as string, authorizationUrl: 'https://api.supabase.com/v1/oauth/authorize', tokenUrl: 'https://api.supabase.com/v1/oauth/token', - // Supabase doesn't have a standard userInfo endpoint that works with our flow, - // so we use a dummy URL and rely on our custom getUserInfo implementation userInfoUrl: 'https://dummy-not-used.supabase.co', scopes: ['database.read', 'database.write', 'projects.read'], responseType: 'code', @@ -636,11 +622,9 @@ export const auth = betterAuth({ try { logger.info('Creating Supabase user profile from token data') - // Extract user identifier from tokens if possible let userId = 'supabase-user' if (tokens.idToken) { try { - // Try to decode the JWT to get user information const decodedToken = JSON.parse( Buffer.from(tokens.idToken.split('.')[1], 'base64').toString() ) @@ -654,12 +638,9 @@ export const auth = betterAuth({ } } - // Generate a unique enough identifier const uniqueId = `${userId}-${Date.now()}` - const now = new Date() - // Create a synthetic user profile since we can't fetch one return { id: uniqueId, name: 'Supabase User', @@ -721,7 +702,7 @@ export const auth = betterAuth({ return { id: profile.data.id, name: profile.data.name || 'X User', - email: `${profile.data.username}@x.com`, // Create synthetic email with username + email: `${profile.data.username}@x.com`, image: profile.data.profile_image_url, emailVerified: profile.data.verified || false, createdAt: now, @@ -774,7 +755,7 @@ export const auth = betterAuth({ name: profile.name || profile.display_name || 'Confluence User', email: profile.email || `${profile.account_id}@atlassian.com`, image: profile.picture || undefined, - emailVerified: true, // Assume verified since it's an Atlassian account + emailVerified: true, createdAt: now, updatedAt: now, } @@ -895,7 +876,7 @@ export const auth = betterAuth({ name: profile.name || profile.display_name || 'Jira User', email: profile.email || `${profile.account_id}@atlassian.com`, image: profile.picture || undefined, - emailVerified: true, // Assume verified since it's an Atlassian account + emailVerified: true, createdAt: now, updatedAt: now, } @@ -933,7 +914,7 @@ export const auth = betterAuth({ userInfoUrl: 'https://api.notion.com/v1/users/me', scopes: ['workspace.content', 'workspace.name', 'page.read', 'page.write'], responseType: 'code', - pkce: false, // Notion doesn't support PKCE + pkce: false, accessType: 'offline', authentication: 'basic', prompt: 'consent', @@ -943,7 +924,7 @@ export const auth = betterAuth({ const response = await fetch('https://api.notion.com/v1/users/me', { headers: { Authorization: `Bearer ${tokens.accessToken}`, - 'Notion-Version': '2022-06-28', // Specify the Notion API version + 'Notion-Version': '2022-06-28', }, }) @@ -1011,7 +992,7 @@ export const auth = betterAuth({ return { id: data.id, name: 
data.name || 'Reddit User', - email: `${data.name}@reddit.user`, // Reddit doesn't provide email in identity scope + email: `${data.name}@reddit.user`, image: data.icon_img || undefined, emailVerified: false, createdAt: now, @@ -1128,7 +1109,6 @@ export const auth = betterAuth({ let userId = 'slack-bot' if (tokens.idToken) { try { - // Try to decode the JWT to get user information const decodedToken = JSON.parse( Buffer.from(tokens.idToken.split('.')[1], 'base64').toString() ) @@ -1140,12 +1120,9 @@ export const auth = betterAuth({ } } - // Generate a unique enough identifier const uniqueId = `${userId}-${Date.now()}` - const now = new Date() - // Create a synthetic user profile since we can't fetch one return { id: uniqueId, name: 'Slack Bot', @@ -1230,7 +1207,6 @@ export const auth = betterAuth({ status: subscription.status, }) - // Sync usage limits for the new subscription try { await syncSubscriptionUsageLimits(subscription) } catch (error) { @@ -1241,7 +1217,6 @@ export const auth = betterAuth({ }) } - // Send welcome email for Pro and Team plans try { const { sendPlanWelcomeEmail } = await import('@/lib/billing') await sendPlanWelcomeEmail(subscription) @@ -1286,9 +1261,7 @@ export const auth = betterAuth({ referenceId: subscription.referenceId, }) - // Reset usage limits back to free tier defaults try { - // This will sync limits based on the now-inactive subscription (defaulting to free tier) await syncSubscriptionUsageLimits(subscription) logger.info('[onSubscriptionDeleted] Reset usage limits to free tier', { @@ -1311,7 +1284,6 @@ export const auth = betterAuth({ }) try { - // Handle invoice events switch (event.type) { case 'invoice.payment_succeeded': { await handleInvoicePaymentSucceeded(event) @@ -1347,13 +1319,11 @@ export const auth = betterAuth({ eventType: event.type, error, }) - throw error // Re-throw to signal webhook failure to Stripe + throw error } }, }), - // Add organization plugin as a separate entry in the plugins array organization({ - // Allow team plan subscribers to create organizations allowUserToCreateOrganization: async (user) => { const dbSubscriptions = await db .select() @@ -1457,11 +1427,9 @@ export const auth = betterAuth({ signUp: '/signup', error: '/error', verify: '/verify', - verifyRequest: '/verify-request', }, }) -// Server-side auth helpers export async function getSession() { const hdrs = await headers() return await auth.api.getSession({ diff --git a/apps/sim/middleware.ts b/apps/sim/middleware.ts index a4b330b12c..de4a800044 100644 --- a/apps/sim/middleware.ts +++ b/apps/sim/middleware.ts @@ -138,12 +138,7 @@ export async function middleware(request: NextRequest) { return NextResponse.redirect(new URL('/login', request.url)) } - // Check if user needs email verification - const requiresVerification = request.cookies.get('requiresEmailVerification') - if (requiresVerification?.value === 'true') { - return NextResponse.redirect(new URL('/verify', request.url)) - } - + // Email verification is enforced by Better Auth (server-side). No cookie gating here. 
return NextResponse.next() } diff --git a/docker-compose.local.yml b/docker-compose.local.yml index f9fe6d5995..8e55a43928 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -15,11 +15,6 @@ services: - NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-http://localhost:3000} - BETTER_AUTH_SECRET=${BETTER_AUTH_SECRET:-your_auth_secret_here} - ENCRYPTION_KEY=${ENCRYPTION_KEY:-your_encryption_key_here} - - GOOGLE_CLIENT_ID=${GOOGLE_CLIENT_ID:-placeholder} - - GOOGLE_CLIENT_SECRET=${GOOGLE_CLIENT_SECRET:-placeholder} - - GITHUB_CLIENT_ID=${GITHUB_CLIENT_ID:-placeholder} - - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-placeholder} - - RESEND_API_KEY=${RESEND_API_KEY:-placeholder} - OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434} - NEXT_PUBLIC_SOCKET_URL=${NEXT_PUBLIC_SOCKET_URL:-http://localhost:3002} depends_on: diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index 5bb40e2527..c41563da1a 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -14,11 +14,6 @@ services: - NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-http://localhost:3000} - BETTER_AUTH_SECRET=${BETTER_AUTH_SECRET:-your_auth_secret_here} - ENCRYPTION_KEY=${ENCRYPTION_KEY:-your_encryption_key_here} - - GOOGLE_CLIENT_ID=${GOOGLE_CLIENT_ID:-placeholder} - - GOOGLE_CLIENT_SECRET=${GOOGLE_CLIENT_SECRET:-placeholder} - - GITHUB_CLIENT_ID=${GITHUB_CLIENT_ID:-placeholder} - - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-placeholder} - - RESEND_API_KEY=${RESEND_API_KEY:-placeholder} - OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434} - SOCKET_SERVER_URL=${SOCKET_SERVER_URL:-http://localhost:3002} - NEXT_PUBLIC_SOCKET_URL=${NEXT_PUBLIC_SOCKET_URL:-http://localhost:3002} From 658cf11299c847f75af9b407d748171d5dc36bcb Mon Sep 17 00:00:00 2001 From: Waleed Date: Wed, 17 Sep 2025 17:17:55 -0700 Subject: [PATCH 4/6] improvement(idempotency): added atomic claims to prevent duplicate processing for long-running workflows (#1366) * improvement(idempotency): added atomic claims to prevent duplicate processing for long-running workflows * ack PR comments --- apps/sim/lib/idempotency/service.ts | 476 +++++++++++++++++++--------- 1 file changed, 322 insertions(+), 154 deletions(-) diff --git a/apps/sim/lib/idempotency/service.ts b/apps/sim/lib/idempotency/service.ts index 7098990840..48fb4ce8e1 100644 --- a/apps/sim/lib/idempotency/service.ts +++ b/apps/sim/lib/idempotency/service.ts @@ -19,12 +19,6 @@ export interface IdempotencyConfig { * Default: 'default' */ namespace?: string - - /** - * Enable database fallback when Redis is not available - * Default: true - */ - enableDatabaseFallback?: boolean } export interface IdempotencyResult { @@ -44,29 +38,30 @@ export interface IdempotencyResult { previousResult?: any /** - * Storage method used ('redis', 'database', 'memory') + * Storage method used ('redis', 'database') */ - storageMethod: 'redis' | 'database' | 'memory' + storageMethod: 'redis' | 'database' } export interface ProcessingResult { success: boolean result?: any error?: string + status?: 'in-progress' | 'completed' | 'failed' + startedAt?: number +} + +export interface AtomicClaimResult { + claimed: boolean + existingResult?: ProcessingResult + normalizedKey: string + storageMethod: 'redis' | 'database' } const DEFAULT_TTL = 60 * 60 * 24 * 7 // 7 days const REDIS_KEY_PREFIX = 'idempotency:' -const MEMORY_CACHE_SIZE = 1000 - -const memoryCache = new Map< - string, - { - result: any - timestamp: number - ttl: number - } ->() +const MAX_WAIT_TIME_MS = 300000 // 5 minutes max wait for in-progress 
operations +const POLL_INTERVAL_MS = 1000 // Check every 1 second for completion /** * Universal idempotency service for webhooks, triggers, and any other operations @@ -79,7 +74,6 @@ export class IdempotencyService { this.config = { ttlSeconds: config.ttlSeconds ?? DEFAULT_TTL, namespace: config.namespace ?? 'default', - enableDatabaseFallback: config.enableDatabaseFallback ?? true, } } @@ -139,70 +133,202 @@ export class IdempotencyService { logger.warn(`Redis idempotency check failed for ${normalizedKey}:`, error) } - if (this.config.enableDatabaseFallback) { - try { - const existing = await db - .select({ result: idempotencyKey.result, createdAt: idempotencyKey.createdAt }) - .from(idempotencyKey) - .where( - and( - eq(idempotencyKey.key, normalizedKey), - eq(idempotencyKey.namespace, this.config.namespace) - ) + // Always fallback to database when Redis is not available + try { + const existing = await db + .select({ result: idempotencyKey.result, createdAt: idempotencyKey.createdAt }) + .from(idempotencyKey) + .where( + and( + eq(idempotencyKey.key, normalizedKey), + eq(idempotencyKey.namespace, this.config.namespace) ) - .limit(1) - - if (existing.length > 0) { - const item = existing[0] - const isExpired = Date.now() - item.createdAt.getTime() > this.config.ttlSeconds * 1000 - - if (!isExpired) { - logger.debug(`Idempotency hit in database: ${normalizedKey}`) - return { - isFirstTime: false, - normalizedKey, - previousResult: item.result, - storageMethod: 'database', - } + ) + .limit(1) + + if (existing.length > 0) { + const item = existing[0] + const isExpired = Date.now() - item.createdAt.getTime() > this.config.ttlSeconds * 1000 + + if (!isExpired) { + logger.debug(`Idempotency hit in database: ${normalizedKey}`) + return { + isFirstTime: false, + normalizedKey, + previousResult: item.result, + storageMethod: 'database', } - await db - .delete(idempotencyKey) - .where(eq(idempotencyKey.key, normalizedKey)) - .catch((err) => logger.warn(`Failed to clean up expired key ${normalizedKey}:`, err)) } + await db + .delete(idempotencyKey) + .where(eq(idempotencyKey.key, normalizedKey)) + .catch((err) => logger.warn(`Failed to clean up expired key ${normalizedKey}:`, err)) + } + + logger.debug(`Idempotency miss in database: ${normalizedKey}`) + return { + isFirstTime: true, + normalizedKey, + storageMethod: 'database', + } + } catch (error) { + logger.error(`Database idempotency check failed for ${normalizedKey}:`, error) + throw new Error(`Failed to check idempotency: database unavailable`) + } + } - logger.debug(`Idempotency miss in database: ${normalizedKey}`) + /** + * Atomically claim an idempotency key for processing + * Returns true if successfully claimed, false if already exists + */ + async atomicallyClaim( + provider: string, + identifier: string, + additionalContext?: Record + ): Promise { + const normalizedKey = this.normalizeKey(provider, identifier, additionalContext) + const redisKey = `${REDIS_KEY_PREFIX}${normalizedKey}` + const inProgressResult: ProcessingResult = { + success: false, + status: 'in-progress', + startedAt: Date.now(), + } + + try { + const redis = getRedisClient() + if (redis) { + const claimed = await redis.set( + redisKey, + JSON.stringify(inProgressResult), + 'EX', + this.config.ttlSeconds, + 'NX' + ) + + if (claimed === 'OK') { + logger.debug(`Atomically claimed idempotency key in Redis: ${normalizedKey}`) + return { + claimed: true, + normalizedKey, + storageMethod: 'redis', + } + } + const existingData = await redis.get(redisKey) + const 
existingResult = existingData ? JSON.parse(existingData) : null + logger.debug(`Idempotency key already claimed in Redis: ${normalizedKey}`) return { - isFirstTime: true, + claimed: false, + existingResult, normalizedKey, - storageMethod: 'database', + storageMethod: 'redis', } - } catch (error) { - logger.warn(`Database idempotency check failed for ${normalizedKey}:`, error) } + } catch (error) { + logger.warn(`Redis atomic claim failed for ${normalizedKey}:`, error) } - const memoryEntry = memoryCache.get(normalizedKey) - if (memoryEntry) { - const isExpired = Date.now() - memoryEntry.timestamp > memoryEntry.ttl * 1000 - if (!isExpired) { - logger.debug(`Idempotency hit in memory: ${normalizedKey}`) + // Always fallback to database when Redis is not available + try { + const insertResult = await db + .insert(idempotencyKey) + .values({ + key: normalizedKey, + namespace: this.config.namespace, + result: inProgressResult, + createdAt: new Date(), + }) + .onConflictDoNothing() + .returning({ key: idempotencyKey.key }) + + if (insertResult.length > 0) { + logger.debug(`Atomically claimed idempotency key in database: ${normalizedKey}`) return { - isFirstTime: false, + claimed: true, normalizedKey, - previousResult: memoryEntry.result, - storageMethod: 'memory', + storageMethod: 'database', } } - memoryCache.delete(normalizedKey) + const existing = await db + .select({ result: idempotencyKey.result }) + .from(idempotencyKey) + .where( + and( + eq(idempotencyKey.key, normalizedKey), + eq(idempotencyKey.namespace, this.config.namespace) + ) + ) + .limit(1) + + const existingResult = + existing.length > 0 ? (existing[0].result as ProcessingResult) : undefined + logger.debug(`Idempotency key already claimed in database: ${normalizedKey}`) + return { + claimed: false, + existingResult, + normalizedKey, + storageMethod: 'database', + } + } catch (error) { + logger.error(`Database atomic claim failed for ${normalizedKey}:`, error) + throw new Error(`Failed to claim idempotency key: database unavailable`) } + } + + /** + * Wait for an in-progress operation to complete and return its result + */ + async waitForResult(normalizedKey: string, storageMethod: 'redis' | 'database'): Promise { + const startTime = Date.now() + const redisKey = `${REDIS_KEY_PREFIX}${normalizedKey}` + + while (Date.now() - startTime < MAX_WAIT_TIME_MS) { + try { + let currentResult: ProcessingResult | null = null + + if (storageMethod === 'redis') { + const redis = getRedisClient() + if (redis) { + const data = await redis.get(redisKey) + currentResult = data ? JSON.parse(data) : null + } + } else if (storageMethod === 'database') { + const existing = await db + .select({ result: idempotencyKey.result }) + .from(idempotencyKey) + .where( + and( + eq(idempotencyKey.key, normalizedKey), + eq(idempotencyKey.namespace, this.config.namespace) + ) + ) + .limit(1) + currentResult = existing.length > 0 ? 
(existing[0].result as ProcessingResult) : null + } + + if (currentResult?.status === 'completed') { + logger.debug(`Operation completed, returning result: ${normalizedKey}`) + if (currentResult.success === false) { + throw new Error(currentResult.error || 'Previous operation failed') + } + return currentResult.result as T + } + + if (currentResult?.status === 'failed') { + logger.debug(`Operation failed, throwing error: ${normalizedKey}`) + throw new Error(currentResult.error || 'Previous operation failed') + } - logger.debug(`Idempotency miss in memory: ${normalizedKey}`) - return { - isFirstTime: true, - normalizedKey, - storageMethod: 'memory', + await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS)) + } catch (error) { + if (error instanceof Error && error.message.includes('operation failed')) { + throw error + } + logger.warn(`Error while waiting for result ${normalizedKey}:`, error) + await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS)) + } } + + throw new Error(`Timeout waiting for idempotency operation to complete: ${normalizedKey}`) } /** @@ -211,7 +337,7 @@ export class IdempotencyService { async storeResult( normalizedKey: string, result: ProcessingResult, - storageMethod: 'redis' | 'database' | 'memory' + storageMethod: 'redis' | 'database' ): Promise { const serializedResult = JSON.stringify(result) @@ -232,62 +358,34 @@ export class IdempotencyService { logger.warn(`Failed to store result in Redis for ${normalizedKey}:`, error) } - if (this.config.enableDatabaseFallback && storageMethod !== 'memory') { - try { - await db - .insert(idempotencyKey) - .values({ - key: normalizedKey, - namespace: this.config.namespace, + // Always fallback to database when Redis is not available + try { + await db + .insert(idempotencyKey) + .values({ + key: normalizedKey, + namespace: this.config.namespace, + result: result, + createdAt: new Date(), + }) + .onConflictDoUpdate({ + target: [idempotencyKey.key, idempotencyKey.namespace], + set: { result: result, createdAt: new Date(), - }) - .onConflictDoUpdate({ - target: [idempotencyKey.key, idempotencyKey.namespace], - set: { - result: result, - createdAt: new Date(), - }, - }) - - logger.debug(`Stored idempotency result in database: ${normalizedKey}`) - return - } catch (error) { - logger.warn(`Failed to store result in database for ${normalizedKey}:`, error) - } - } - - memoryCache.set(normalizedKey, { - result, - timestamp: Date.now(), - ttl: this.config.ttlSeconds, - }) + }, + }) - if (memoryCache.size > MEMORY_CACHE_SIZE) { - const entries = Array.from(memoryCache.entries()) - const now = Date.now() - - entries.forEach(([key, entry]) => { - if (now - entry.timestamp > entry.ttl * 1000) { - memoryCache.delete(key) - } - }) - - if (memoryCache.size > MEMORY_CACHE_SIZE) { - const sortedEntries = entries - .filter(([key]) => memoryCache.has(key)) - .sort((a, b) => a[1].timestamp - b[1].timestamp) - - const toRemove = sortedEntries.slice(0, memoryCache.size - MEMORY_CACHE_SIZE) - toRemove.forEach(([key]) => memoryCache.delete(key)) - } + logger.debug(`Stored idempotency result in database: ${normalizedKey}`) + } catch (error) { + logger.error(`Failed to store result in database for ${normalizedKey}:`, error) + throw new Error(`Failed to store idempotency result: database unavailable`) } - - logger.debug(`Stored idempotency result in memory: ${normalizedKey}`) } /** - * Execute an operation with idempotency protection + * Execute an operation with idempotency protection using atomic claims + * Eliminates race 
conditions by claiming the key before execution */ async executeWithIdempotency( provider: string, @@ -295,68 +393,105 @@ export class IdempotencyService { operation: () => Promise, additionalContext?: Record ): Promise { - const idempotencyCheck = await this.checkIdempotency(provider, identifier, additionalContext) + const claimResult = await this.atomicallyClaim(provider, identifier, additionalContext) - if (!idempotencyCheck.isFirstTime) { - logger.info(`Skipping duplicate operation: ${idempotencyCheck.normalizedKey}`) + if (!claimResult.claimed) { + const existingResult = claimResult.existingResult - if (idempotencyCheck.previousResult?.success === false) { - throw new Error(idempotencyCheck.previousResult?.error || 'Previous operation failed') + if (existingResult?.status === 'completed') { + logger.info(`Returning cached result for: ${claimResult.normalizedKey}`) + if (existingResult.success === false) { + throw new Error(existingResult.error || 'Previous operation failed') + } + return existingResult.result as T + } + + if (existingResult?.status === 'failed') { + logger.info(`Previous operation failed for: ${claimResult.normalizedKey}`) + throw new Error(existingResult.error || 'Previous operation failed') + } + + if (existingResult?.status === 'in-progress') { + logger.info(`Waiting for in-progress operation: ${claimResult.normalizedKey}`) + return await this.waitForResult(claimResult.normalizedKey, claimResult.storageMethod) + } + + if (existingResult) { + return existingResult.result as T } - return idempotencyCheck.previousResult?.result as T + throw new Error(`Unexpected state: key claimed but no existing result found`) } try { - logger.debug(`Executing new operation: ${idempotencyCheck.normalizedKey}`) + logger.info(`Executing new operation: ${claimResult.normalizedKey}`) const result = await operation() await this.storeResult( - idempotencyCheck.normalizedKey, - { success: true, result }, - idempotencyCheck.storageMethod + claimResult.normalizedKey, + { success: true, result, status: 'completed' }, + claimResult.storageMethod ) + logger.debug(`Successfully completed operation: ${claimResult.normalizedKey}`) return result } catch (error) { const errorMessage = error instanceof Error ? error.message : 'Unknown error' + await this.storeResult( - idempotencyCheck.normalizedKey, - { success: false, error: errorMessage }, - idempotencyCheck.storageMethod + claimResult.normalizedKey, + { success: false, error: errorMessage, status: 'failed' }, + claimResult.storageMethod ) + logger.warn(`Operation failed: ${claimResult.normalizedKey} - ${errorMessage}`) throw error } } /** - * Create an idempotency key from a webhook payload + * Create an idempotency key from a webhook payload following RFC best practices + * Priority order: + * 1. Standard webhook headers (webhook-id, x-webhook-id, etc.) + * 2. Event/message IDs from payload + * 3. Deterministic hash of stable payload fields (excluding timestamps) */ static createWebhookIdempotencyKey( webhookId: string, payload: any, headers?: Record ): string { + // 1. 
Check for standard webhook headers (RFC compliant) const webhookIdHeader = - headers?.['x-webhook-id'] || + headers?.['webhook-id'] || // Standard Webhooks spec + headers?.['x-webhook-id'] || // Legacy standard headers?.['x-shopify-webhook-id'] || headers?.['x-github-delivery'] || - headers?.['stripe-signature']?.split(',')[0] + headers?.['x-event-id'] // Generic event ID header if (webhookIdHeader) { return `${webhookId}:${webhookIdHeader}` } - const payloadId = payload?.id || payload?.event_id || payload?.message?.id || payload?.data?.id + // 2. Extract event/message IDs from payload (most reliable) + const payloadId = + payload?.id || + payload?.event_id || + payload?.eventId || + payload?.message?.id || + payload?.data?.id || + payload?.object?.id || + payload?.event?.id if (payloadId) { return `${webhookId}:${payloadId}` } + // 3. Create deterministic hash from stable payload fields (excluding timestamps) + const stablePayload = IdempotencyService.createStablePayloadForHashing(payload) const payloadHash = crypto .createHash('sha256') - .update(JSON.stringify(payload)) + .update(JSON.stringify(stablePayload)) .digest('hex') .substring(0, 16) @@ -364,29 +499,62 @@ export class IdempotencyService { } /** - * Create an idempotency key for Gmail polling + * Create a stable representation of the payload for hashing by removing + * timestamp and other volatile fields that change between requests */ - static createGmailIdempotencyKey(webhookId: string, emailId: string): string { - return `${webhookId}:${emailId}` - } + private static createStablePayloadForHashing(payload: any): any { + if (!payload || typeof payload !== 'object') { + return payload + } - /** - * Create an idempotency key for generic triggers - */ - static createTriggerIdempotencyKey( - triggerId: string, - eventId: string, - additionalContext?: Record - ): string { - const base = `${triggerId}:${eventId}` - if (additionalContext && Object.keys(additionalContext).length > 0) { - const contextStr = Object.keys(additionalContext) - .sort() - .map((key) => `${key}=${additionalContext[key]}`) - .join('&') - return `${base}:${contextStr}` + const volatileFields = [ + 'timestamp', + 'created_at', + 'updated_at', + 'sent_at', + 'received_at', + 'processed_at', + 'delivered_at', + 'attempt', + 'retry_count', + 'request_id', + 'trace_id', + 'span_id', + 'delivery_id', + 'webhook_timestamp', + ] + + const cleanPayload = { ...payload } + + const removeVolatileFields = (obj: any): any => { + if (!obj || typeof obj !== 'object') return obj + + if (Array.isArray(obj)) { + return obj.map(removeVolatileFields) + } + + const cleaned: any = {} + for (const [key, value] of Object.entries(obj)) { + const lowerKey = key.toLowerCase() + + if (volatileFields.some((field) => lowerKey.includes(field))) { + continue + } + + if (typeof value === 'string' && /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(value)) { + continue + } + if (typeof value === 'number' && value > 1000000000 && value < 9999999999) { + continue + } + + cleaned[key] = removeVolatileFields(value) + } + + return cleaned } - return base + + return removeVolatileFields(cleanPayload) } } From 6028b1f5c0a10a4ae8d9776f6c39ef0425d7105c Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Wed, 17 Sep 2025 18:56:54 -0700 Subject: [PATCH 5/6] fix(dockerfile): needs dummy db url (#1368) --- docker/app.Dockerfile | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docker/app.Dockerfile b/docker/app.Dockerfile index 1d13e01f95..1e20f6d7ed 100644 --- a/docker/app.Dockerfile +++ 
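The idempotency patch defines the atomic-claim API but no caller appears in this excerpt. A hedged usage sketch of how a webhook route might wrap its processing with `executeWithIdempotency` — the route shape, the `processWebhook` callback, and the `'webhooks'` namespace are assumptions, not code from this PR:

```ts
import { IdempotencyService } from '@/lib/idempotency/service'

const idempotency = new IdempotencyService({ namespace: 'webhooks' })

export async function handleIncomingWebhook(
  webhookId: string,
  payload: unknown,
  headers: Record<string, string>,
  processWebhook: () => Promise<{ ok: boolean }>
) {
  // Derive a stable key: standard webhook-id headers first, then payload event IDs,
  // then a hash of the payload with volatile (timestamp-like) fields stripped.
  const key = IdempotencyService.createWebhookIdempotencyKey(webhookId, payload, headers)

  // The first caller claims the key (Redis SET ... NX, or the INSERT ... ON CONFLICT
  // DO NOTHING database fallback) and runs the operation; concurrent duplicates
  // either get the cached result or poll until the in-progress run completes
  // (bounded by MAX_WAIT_TIME_MS).
  return idempotency.executeWithIdempotency('webhook', key, processWebhook)
}
```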
b/docker/app.Dockerfile @@ -43,6 +43,14 @@ ENV NEXT_TELEMETRY_DISABLED=1 \ DOCKER_BUILD=1 WORKDIR /app + +# Provide dummy database URLs during image build so server code that imports @sim/db +# can be evaluated without crashing. Runtime environments should override these. +ARG DATABASE_URL="postgresql://user:pass@localhost:5432/dummy" +ARG POSTGRES_URL="postgresql://user:pass@localhost:5432/dummy" +ENV DATABASE_URL=${DATABASE_URL} +ENV POSTGRES_URL=${POSTGRES_URL} + RUN bun run build # ======================================== From d0b69455e238171f867930011581b42045d3c6a4 Mon Sep 17 00:00:00 2001 From: Adam Gough <77861281+aadamgough@users.noreply.github.com> Date: Wed, 17 Sep 2025 19:16:12 -0700 Subject: [PATCH 6/6] Improvement(sharepoint): added more operations in sharepoint (#1369) * added list tools * not working yet * improved read and create * added scopes * updated sharepoint tools * added greptile comments --------- Co-authored-by: Adam Gough --- .../docs/content/docs/en/tools/sharepoint.mdx | 63 ++++- apps/sim/blocks/blocks/sharepoint.ts | 194 +++++++++++++- apps/sim/lib/auth.ts | 1 + apps/sim/lib/oauth/oauth.ts | 1 + apps/sim/tools/registry.ts | 6 + apps/sim/tools/sharepoint/create_list.ts | 165 ++++++++++++ apps/sim/tools/sharepoint/get_list.ts | 243 ++++++++++++++++++ apps/sim/tools/sharepoint/index.ts | 6 + apps/sim/tools/sharepoint/types.ts | 71 +++++ apps/sim/tools/sharepoint/update_list.ts | 169 ++++++++++++ 10 files changed, 909 insertions(+), 10 deletions(-) create mode 100644 apps/sim/tools/sharepoint/create_list.ts create mode 100644 apps/sim/tools/sharepoint/get_list.ts create mode 100644 apps/sim/tools/sharepoint/update_list.ts diff --git a/apps/docs/content/docs/en/tools/sharepoint.mdx b/apps/docs/content/docs/en/tools/sharepoint.mdx index 9b42322def..b32b84a326 100644 --- a/apps/docs/content/docs/en/tools/sharepoint.mdx +++ b/apps/docs/content/docs/en/tools/sharepoint.mdx @@ -1,6 +1,6 @@ --- title: Sharepoint -description: Read and create pages +description: Work with pages and lists --- import { BlockInfoCard } from "@/components/ui/block-info-card" @@ -61,7 +61,7 @@ In Sim, the SharePoint integration empowers your agents to create and access Sha ## Usage Instructions -Integrate Sharepoint into the workflow. Can read and create pages, and list sites. Requires OAuth. +Integrate SharePoint into the workflow. Read/create pages, list sites, and work with lists (read, create, update items). Requires OAuth. @@ -124,6 +124,65 @@ List details of all SharePoint sites | --------- | ---- | ----------- | | `site` | object | Information about the current SharePoint site | +### `sharepoint_create_list` + +Create a new list in a SharePoint site + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `siteId` | string | No | The ID of the SharePoint site \(internal use\) | +| `siteSelector` | string | No | Select the SharePoint site | +| `listDisplayName` | string | Yes | Display name of the list to create | +| `listDescription` | string | No | Description of the list | +| `listTemplate` | string | No | List template name \(e.g., 'genericList'\) | +| `pageContent` | string | No | Optional JSON of columns. Either a top-level array of column definitions or an object with \{ columns: \[...\] \}. 
| + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `list` | object | Created SharePoint list information | + +### `sharepoint_get_list` + +Get metadata (and optionally columns/items) for a SharePoint list + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `siteSelector` | string | No | Select the SharePoint site | +| `siteId` | string | No | The ID of the SharePoint site \(internal use\) | +| `listId` | string | No | The ID of the list to retrieve | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `list` | object | Information about the SharePoint list | + +### `sharepoint_update_list` + +Update the properties (fields) on a SharePoint list item + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `siteSelector` | string | No | Select the SharePoint site | +| `siteId` | string | No | The ID of the SharePoint site \(internal use\) | +| `listId` | string | No | The ID of the list containing the item | +| `itemId` | string | Yes | The ID of the list item to update | +| `listItemFields` | object | Yes | Field values to update on the list item | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `item` | object | Updated SharePoint list item | + ## Notes diff --git a/apps/sim/blocks/blocks/sharepoint.ts b/apps/sim/blocks/blocks/sharepoint.ts index 11212c9d7b..176446bb62 100644 --- a/apps/sim/blocks/blocks/sharepoint.ts +++ b/apps/sim/blocks/blocks/sharepoint.ts @@ -1,13 +1,16 @@ import { MicrosoftSharepointIcon } from '@/components/icons' +import { createLogger } from '@/lib/logs/console/logger' import type { BlockConfig } from '@/blocks/types' import type { SharepointResponse } from '@/tools/sharepoint/types' +const logger = createLogger('SharepointBlock') + export const SharepointBlock: BlockConfig = { type: 'sharepoint', name: 'Sharepoint', - description: 'Read and create pages', + description: 'Work with pages and lists', longDescription: - 'Integrate Sharepoint into the workflow. Can read and create pages, and list sites. Requires OAuth.', + 'Integrate SharePoint into the workflow. Read/create pages, list sites, and work with lists (read, create, update items). 
Requires OAuth.', docsLink: 'https://docs.sim.ai/tools/sharepoint', category: 'tools', bgColor: '#E0E0E0', @@ -23,6 +26,9 @@ export const SharepointBlock: BlockConfig = { { label: 'Create Page', id: 'create_page' }, { label: 'Read Page', id: 'read_page' }, { label: 'List Sites', id: 'list_sites' }, + { label: 'Create List', id: 'create_list' }, + { label: 'Read List', id: 'read_list' }, + { label: 'Update List', id: 'update_list' }, ], }, // Sharepoint Credentials @@ -39,6 +45,8 @@ export const SharepointBlock: BlockConfig = { 'email', 'Files.Read', 'Files.ReadWrite', + 'Sites.Read.All', + 'Sites.ReadWrite.All', 'offline_access', ], placeholder: 'Select Microsoft account', @@ -64,7 +72,17 @@ export const SharepointBlock: BlockConfig = { placeholder: 'Select a site', dependsOn: ['credential'], mode: 'basic', - condition: { field: 'operation', value: ['create_page', 'read_page', 'list_sites'] }, + condition: { + field: 'operation', + value: [ + 'create_page', + 'read_page', + 'list_sites', + 'create_list', + 'read_list', + 'update_list', + ], + }, }, { @@ -86,13 +104,59 @@ export const SharepointBlock: BlockConfig = { mode: 'advanced', }, + { + id: 'listId', + title: 'List ID', + type: 'short-input', + layout: 'full', + placeholder: 'Enter list ID (GUID). Required for Update; optional for Read.', + canonicalParamId: 'listId', + condition: { field: 'operation', value: ['read_list', 'update_list'] }, + }, + + { + id: 'listItemId', + title: 'Item ID', + type: 'short-input', + layout: 'full', + placeholder: 'Enter item ID', + canonicalParamId: 'itemId', + condition: { field: 'operation', value: ['update_list'] }, + }, + + { + id: 'listDisplayName', + title: 'List Display Name', + type: 'short-input', + layout: 'full', + placeholder: 'Name of the list', + condition: { field: 'operation', value: 'create_list' }, + }, + + { + id: 'listTemplate', + title: 'List Template', + type: 'short-input', + layout: 'full', + placeholder: "Template (e.g., 'genericList')", + condition: { field: 'operation', value: 'create_list' }, + }, + { id: 'pageContent', title: 'Page Content', type: 'long-input', layout: 'full', - placeholder: 'Content of the page', - condition: { field: 'operation', value: 'create_page' }, + placeholder: 'Provide page content', + condition: { field: 'operation', value: ['create_list'] }, + }, + { + id: 'listDescription', + title: 'List Description', + type: 'long-input', + layout: 'full', + placeholder: 'Optional description', + condition: { field: 'operation', value: 'create_list' }, }, { @@ -106,9 +170,26 @@ export const SharepointBlock: BlockConfig = { mode: 'advanced', condition: { field: 'operation', value: 'create_page' }, }, + + { + id: 'listItemFields', + title: 'List Item Fields', + type: 'long-input', + layout: 'full', + placeholder: 'Enter list item fields', + canonicalParamId: 'listItemFields', + condition: { field: 'operation', value: 'update_list' }, + }, ], tools: { - access: ['sharepoint_create_page', 'sharepoint_read_page', 'sharepoint_list_sites'], + access: [ + 'sharepoint_create_page', + 'sharepoint_read_page', + 'sharepoint_list_sites', + 'sharepoint_create_list', + 'sharepoint_get_list', + 'sharepoint_update_list', + ], config: { tool: (params) => { switch (params.operation) { @@ -118,6 +199,12 @@ export const SharepointBlock: BlockConfig = { return 'sharepoint_read_page' case 'list_sites': return 'sharepoint_list_sites' + case 'create_list': + return 'sharepoint_create_list' + case 'read_list': + return 'sharepoint_get_list' + case 'update_list': + return 
'sharepoint_update_list' default: throw new Error(`Invalid Sharepoint operation: ${params.operation}`) } @@ -128,12 +215,71 @@ export const SharepointBlock: BlockConfig = { // Use siteSelector if provided, otherwise use manualSiteId const effectiveSiteId = (siteSelector || manualSiteId || '').trim() + const { + itemId: providedItemId, + listItemId, + listItemFields, + includeColumns, + includeItems, + ...others + } = rest as any + + let parsedItemFields: any = listItemFields + if (typeof listItemFields === 'string' && listItemFields.trim()) { + try { + parsedItemFields = JSON.parse(listItemFields) + } catch (error) { + logger.error('Failed to parse listItemFields JSON', { + error: error instanceof Error ? error.message : String(error), + }) + } + } + // Ensure listItemFields is an object for the tool schema + if (typeof parsedItemFields !== 'object' || parsedItemFields === null) { + parsedItemFields = undefined + } + + // Sanitize item ID (required by tool) + const rawItemId = providedItemId ?? listItemId + const sanitizedItemId = + rawItemId === undefined || rawItemId === null + ? undefined + : String(rawItemId).trim() || undefined + + const coerceBoolean = (value: any) => { + if (typeof value === 'boolean') return value + if (typeof value === 'string') return value.toLowerCase() === 'true' + return undefined + } + + // Debug logging for update_list param mapping + if (others.operation === 'update_list') { + try { + logger.info('SharepointBlock update_list param check', { + siteId: effectiveSiteId || undefined, + listId: (others as any)?.listId, + listTitle: (others as any)?.listTitle, + itemId: sanitizedItemId, + hasItemFields: !!parsedItemFields && typeof parsedItemFields === 'object', + itemFieldKeys: + parsedItemFields && typeof parsedItemFields === 'object' + ? Object.keys(parsedItemFields) + : [], + }) + } catch {} + } + return { credential, siteId: effectiveSiteId || undefined, - pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined, + pageSize: others.pageSize ? 
Number.parseInt(others.pageSize as string, 10) : undefined, mimeType: mimeType, - ...rest, + ...others, + // Map to tool param names + itemId: sanitizedItemId, + listItemFields: parsedItemFields, + includeColumns: coerceBoolean(includeColumns), + includeItems: coerceBoolean(includeItems), } }, }, @@ -151,6 +297,18 @@ export const SharepointBlock: BlockConfig = { siteSelector: { type: 'string', description: 'Site selector' }, manualSiteId: { type: 'string', description: 'Manual site ID' }, pageSize: { type: 'number', description: 'Results per page' }, + // Create List operation inputs + listDisplayName: { type: 'string', description: 'List display name' }, + listDescription: { type: 'string', description: 'List description' }, + listTemplate: { type: 'string', description: 'List template' }, + // Read List operation inputs + listId: { type: 'string', description: 'List ID' }, + listTitle: { type: 'string', description: 'List title' }, + includeColumns: { type: 'boolean', description: 'Include columns in response' }, + includeItems: { type: 'boolean', description: 'Include items in response' }, + // Update List Item operation inputs + listItemId: { type: 'string', description: 'List item ID' }, + listItemFields: { type: 'string', description: 'List item fields' }, }, outputs: { sites: { @@ -158,5 +316,25 @@ export const SharepointBlock: BlockConfig = { description: 'An array of SharePoint site objects, each containing details such as id, name, and more.', }, + list: { + type: 'json', + description: 'SharePoint list object (id, displayName, name, webUrl, etc.)', + }, + item: { + type: 'json', + description: 'SharePoint list item with fields', + }, + items: { + type: 'json', + description: 'Array of SharePoint list items with fields', + }, + success: { + type: 'boolean', + description: 'Success status', + }, + error: { + type: 'string', + description: 'Error message', + }, }, } diff --git a/apps/sim/lib/auth.ts b/apps/sim/lib/auth.ts index 8ffdd3de48..e5183db575 100644 --- a/apps/sim/lib/auth.ts +++ b/apps/sim/lib/auth.ts @@ -565,6 +565,7 @@ export const auth = betterAuth({ 'email', 'Sites.Read.All', 'Sites.ReadWrite.All', + 'Sites.Manage.All', 'offline_access', ], responseType: 'code', diff --git a/apps/sim/lib/oauth/oauth.ts b/apps/sim/lib/oauth/oauth.ts index fe190e044b..7774ba1d51 100644 --- a/apps/sim/lib/oauth/oauth.ts +++ b/apps/sim/lib/oauth/oauth.ts @@ -260,6 +260,7 @@ export const OAUTH_PROVIDERS: Record = { 'email', 'Sites.Read.All', 'Sites.ReadWrite.All', + 'Sites.Manage.All', 'offline_access', ], }, diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index bb8615f8e9..a682c678cd 100644 --- a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -145,9 +145,12 @@ import { redditGetCommentsTool, redditGetPostsTool, redditHotPostsTool } from '@ import { s3GetObjectTool } from '@/tools/s3' import { searchTool as serperSearch } from '@/tools/serper' import { + sharepointCreateListTool, sharepointCreatePageTool, + sharepointGetListTool, sharepointListSitesTool, sharepointReadPageTool, + sharepointUpdateListItemTool, } from '@/tools/sharepoint' import { slackCanvasTool, slackMessageReaderTool, slackMessageTool } from '@/tools/slack' import { smsSendTool } from '@/tools/sms' @@ -364,6 +367,9 @@ export const tools: Record = { sharepoint_create_page: sharepointCreatePageTool, sharepoint_read_page: sharepointReadPageTool, sharepoint_list_sites: sharepointListSitesTool, + sharepoint_get_list: sharepointGetListTool, + sharepoint_create_list: sharepointCreateListTool, 
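For the new list operations, the block parses `pageContent` (Create List) and `listItemFields` (Update List) from JSON before handing them to the tools registered above. Illustrative input shapes only — the column facets follow Microsoft Graph's `columnDefinition` resource and should be verified against the Graph docs, and the field names are made up:

```ts
// Create List: pageContent may be a bare array of column definitions or an
// object with a `columns` array (see createListTool's body() parsing).
const createListPageContent = {
  columns: [
    { name: 'Status', choice: { choices: ['Open', 'In Progress', 'Done'] } },
    { name: 'DueDate', dateTime: {} },
  ],
}

// Update List: listItemFields is a flat map of field names to new values,
// parsed from JSON in the block and passed through as the item's fields patch.
const updateListItemFields = {
  Title: 'Quarterly report',
  Status: 'Done',
}
```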
+  sharepoint_update_list: sharepointUpdateListItemTool,
   // Provider chat tools
   // Provider chat tools - handled separately in agent blocks
 }
diff --git a/apps/sim/tools/sharepoint/create_list.ts b/apps/sim/tools/sharepoint/create_list.ts
new file mode 100644
index 0000000000..8f2546844f
--- /dev/null
+++ b/apps/sim/tools/sharepoint/create_list.ts
@@ -0,0 +1,165 @@
+import { createLogger } from '@/lib/logs/console/logger'
+import type {
+  SharepointCreateListResponse,
+  SharepointList,
+  SharepointToolParams,
+} from '@/tools/sharepoint/types'
+import type { ToolConfig } from '@/tools/types'
+
+const logger = createLogger('SharePointCreateList')
+
+export const createListTool: ToolConfig<SharepointToolParams, SharepointCreateListResponse> = {
+  id: 'sharepoint_create_list',
+  name: 'Create SharePoint List',
+  description: 'Create a new list in a SharePoint site',
+  version: '1.0',
+
+  oauth: {
+    required: true,
+    provider: 'sharepoint',
+    additionalScopes: ['openid', 'profile', 'email', 'Sites.ReadWrite.All', 'offline_access'],
+  },
+
+  params: {
+    accessToken: {
+      type: 'string',
+      required: true,
+      visibility: 'hidden',
+      description: 'The access token for the SharePoint API',
+    },
+    siteId: {
+      type: 'string',
+      required: false,
+      visibility: 'hidden',
+      description: 'The ID of the SharePoint site (internal use)',
+    },
+    siteSelector: {
+      type: 'string',
+      required: false,
+      visibility: 'user-only',
+      description: 'Select the SharePoint site',
+    },
+    listDisplayName: {
+      type: 'string',
+      required: true,
+      visibility: 'user-only',
+      description: 'Display name of the list to create',
+    },
+    listDescription: {
+      type: 'string',
+      required: false,
+      visibility: 'user-only',
+      description: 'Description of the list',
+    },
+    listTemplate: {
+      type: 'string',
+      required: false,
+      visibility: 'user-only',
+      description: "List template name (e.g., 'genericList')",
+    },
+    pageContent: {
+      type: 'string',
+      required: false,
+      visibility: 'user-only',
+      description:
+        'Optional JSON of columns. Either a top-level array of column definitions or an object with { columns: [...] }.',
+    },
+  },
+
+  request: {
+    url: (params) => {
+      const siteId = params.siteSelector || params.siteId || 'root'
+      return `https://graph.microsoft.com/v1.0/sites/${siteId}/lists`
+    },
+    method: 'POST',
+    headers: (params) => ({
+      Authorization: `Bearer ${params.accessToken}`,
+      'Content-Type': 'application/json',
+      Accept: 'application/json',
+    }),
+    body: (params) => {
+      if (!params.listDisplayName) {
+        throw new Error('listDisplayName is required')
+      }
+
+      // Derive columns from pageContent JSON (object or string) or top-level array
+      let columns: unknown[] | undefined
+      if (params.pageContent) {
+        if (typeof params.pageContent === 'string') {
+          try {
+            const parsed = JSON.parse(params.pageContent)
+            if (Array.isArray(parsed)) columns = parsed
+            else if (parsed && Array.isArray((parsed as any).columns))
+              columns = (parsed as any).columns
+          } catch (error) {
+            logger.warn('Invalid JSON in pageContent for create list; ignoring', {
+              error: error instanceof Error ? error.message : String(error),
+            })
+          }
+        } else if (typeof params.pageContent === 'object') {
+          const pc: any = params.pageContent
+          if (Array.isArray(pc)) columns = pc
+          else if (pc && Array.isArray(pc.columns)) columns = pc.columns
+        }
+      }
+
+      const payload: any = {
+        displayName: params.listDisplayName,
+        description: params.listDescription,
+        list: { template: params.listTemplate || 'genericList' },
+      }
+      if (columns && columns.length > 0) payload.columns = columns
+
+      logger.info('Creating SharePoint list', {
+        displayName: payload.displayName,
+        template: payload.list.template,
+        hasDescription: !!payload.description,
+      })
+
+      return payload
+    },
+  },
+
+  transformResponse: async (response: Response) => {
+    const data = await response.json()
+
+    const list: SharepointList = {
+      id: data.id,
+      displayName: data.displayName ?? data.name,
+      name: data.name,
+      webUrl: data.webUrl,
+      createdDateTime: data.createdDateTime,
+      lastModifiedDateTime: data.lastModifiedDateTime,
+      list: data.list,
+    }
+
+    logger.info('SharePoint list created successfully', {
+      listId: list.id,
+      displayName: list.displayName,
+    })
+
+    return {
+      success: true,
+      output: { list },
+    }
+  },
+
+  outputs: {
+    list: {
+      type: 'object',
+      description: 'Created SharePoint list information',
+      properties: {
+        id: { type: 'string', description: 'The unique ID of the list' },
+        displayName: { type: 'string', description: 'The display name of the list' },
+        name: { type: 'string', description: 'The internal name of the list' },
+        webUrl: { type: 'string', description: 'The web URL of the list' },
+        createdDateTime: { type: 'string', description: 'When the list was created' },
+        lastModifiedDateTime: {
+          type: 'string',
+          description: 'When the list was last modified',
+        },
+        list: { type: 'object', description: 'List properties (e.g., template)' },
+      },
+    },
+  },
+}
diff --git a/apps/sim/tools/sharepoint/get_list.ts b/apps/sim/tools/sharepoint/get_list.ts
new file mode 100644
index 0000000000..f8290d968c
--- /dev/null
+++ b/apps/sim/tools/sharepoint/get_list.ts
@@ -0,0 +1,243 @@
+import { createLogger } from '@/lib/logs/console/logger'
+import type {
+  SharepointGetListResponse,
+  SharepointList,
+  SharepointToolParams,
+} from '@/tools/sharepoint/types'
+import type { ToolConfig } from '@/tools/types'
+
+const logger = createLogger('SharePointGetList')
+
+export const getListTool: ToolConfig<SharepointToolParams, SharepointGetListResponse> = {
+  id: 'sharepoint_get_list',
+  name: 'Get SharePoint List',
+  description: 'Get metadata (and optionally columns/items) for a SharePoint list',
+  version: '1.0',
+
+  oauth: {
+    required: true,
+    provider: 'sharepoint',
+    additionalScopes: ['openid', 'profile', 'email', 'Sites.Read.All', 'offline_access'],
+  },
+
+  params: {
+    accessToken: {
+      type: 'string',
+      required: true,
+      visibility: 'hidden',
+      description: 'The access token for the SharePoint API',
+    },
+    siteSelector: {
+      type: 'string',
+      required: false,
+      visibility: 'user-only',
+      description: 'Select the SharePoint site',
+    },
+    siteId: {
+      type: 'string',
+      required: false,
+      visibility: 'hidden',
+      description: 'The ID of the SharePoint site (internal use)',
+    },
+    listId: {
+      type: 'string',
+      required: false,
+      visibility: 'user-only',
+      description: 'The ID of the list to retrieve',
+    },
+  },
+
+  request: {
+    url: (params) => {
+      const siteId = params.siteId || params.siteSelector || 'root'
+
+      // If no listId is provided, list all lists in the site
+      if (!params.listId) {
+        const baseUrl = `https://graph.microsoft.com/v1.0/sites/${siteId}/lists`
+        const url = new URL(baseUrl)
+        const finalUrl = url.toString()
+        logger.info('SharePoint List All Lists URL', { finalUrl, siteId })
+        return finalUrl
+      }
+
+      const listSegment = params.listId
+      // Default to returning items when targeting a specific list unless explicitly disabled
+      const wantsItems = typeof params.includeItems === 'boolean' ? params.includeItems : true
+
+      // If caller wants items for a specific list, prefer the items endpoint (no columns)
+      if (wantsItems && !params.includeColumns) {
+        const itemsUrl = new URL(
+          `https://graph.microsoft.com/v1.0/sites/${siteId}/lists/${listSegment}/items`
+        )
+        itemsUrl.searchParams.set('$expand', 'fields')
+        const finalItemsUrl = itemsUrl.toString()
+        logger.info('SharePoint Get List Items URL', {
+          finalUrl: finalItemsUrl,
+          siteId,
+          listId: params.listId,
+        })
+        return finalItemsUrl
+      }
+
+      // Otherwise, fetch list metadata (optionally with columns/items via $expand)
+      const baseUrl = `https://graph.microsoft.com/v1.0/sites/${siteId}/lists/${listSegment}`
+      const url = new URL(baseUrl)
+      const expandParts: string[] = []
+      if (params.includeColumns) expandParts.push('columns')
+      if (wantsItems) expandParts.push('items($expand=fields)')
+      if (expandParts.length > 0) url.searchParams.append('$expand', expandParts.join(','))
+
+      const finalUrl = url.toString()
+      logger.info('SharePoint Get List URL', {
+        finalUrl,
+        siteId,
+        listId: params.listId,
+        includeColumns: !!params.includeColumns,
+        includeItems: wantsItems,
+      })
+      return finalUrl
+    },
+    method: 'GET',
+    headers: (params) => ({
+      Authorization: `Bearer ${params.accessToken}`,
+      Accept: 'application/json',
+    }),
+  },
+
+  transformResponse: async (response: Response) => {
+    const data = await response.json()
+
+    // If the response is a collection of items (from the items endpoint)
+    if (
+      Array.isArray((data as any).value) &&
+      (data as any).value.length > 0 &&
+      (data as any).value[0] &&
+      'fields' in (data as any).value[0]
+    ) {
+      const items = (data as any).value.map((i: any) => ({
+        id: i.id,
+        fields: i.fields as Record<string, unknown>,
+      }))
+
+      const nextLink: string | undefined = (data as any)['@odata.nextLink']
+      const nextPageToken = nextLink
+        ? (() => {
+            try {
+              const u = new URL(nextLink)
+              return u.searchParams.get('$skiptoken') || u.searchParams.get('$skip') || undefined
+            } catch {
+              return undefined
+            }
+          })()
+        : undefined
+
+      return {
+        success: true,
+        output: { list: { items } as SharepointList, nextPageToken },
+      }
+    }
+
+    // If this is a collection of lists (site-level)
+    if (Array.isArray((data as any).value)) {
+      const lists: SharepointList[] = (data as any).value.map((l: any) => ({
+        id: l.id,
+        displayName: l.displayName ?? l.name,
+        name: l.name,
+        webUrl: l.webUrl,
+        createdDateTime: l.createdDateTime,
+        lastModifiedDateTime: l.lastModifiedDateTime,
+        list: l.list,
+      }))
+
+      const nextLink: string | undefined = (data as any)['@odata.nextLink']
+      const nextPageToken = nextLink
+        ? (() => {
+            try {
+              const u = new URL(nextLink)
+              return u.searchParams.get('$skiptoken') || u.searchParams.get('$skip') || undefined
+            } catch {
+              return undefined
+            }
+          })()
+        : undefined
+
+      return {
+        success: true,
+        output: { lists, nextPageToken },
+      }
+    }
+
+    // Single list response (with optional expands)
+    const list: SharepointList = {
+      id: data.id,
+      displayName: data.displayName ?? data.name,
+      name: data.name,
+      webUrl: data.webUrl,
+      createdDateTime: data.createdDateTime,
+      lastModifiedDateTime: data.lastModifiedDateTime,
+      list: data.list,
+      columns: Array.isArray(data.columns)
+        ? data.columns.map((c: any) => ({
+            id: c.id,
+            name: c.name,
+            displayName: c.displayName,
+            description: c.description,
+            indexed: c.indexed,
+            enforcedUniqueValues: c.enforcedUniqueValues,
+            hidden: c.hidden,
+            readOnly: c.readOnly,
+            required: c.required,
+            columnGroup: c.columnGroup,
+          }))
+        : undefined,
+      items: Array.isArray(data.items)
+        ? data.items.map((i: any) => ({ id: i.id, fields: i.fields as Record<string, unknown> }))
+        : undefined,
+    }
+
+    return {
+      success: true,
+      output: { list },
+    }
+  },
+
+  outputs: {
+    list: {
+      type: 'object',
+      description: 'Information about the SharePoint list',
+      properties: {
+        id: { type: 'string', description: 'The unique ID of the list' },
+        displayName: { type: 'string', description: 'The display name of the list' },
+        name: { type: 'string', description: 'The internal name of the list' },
+        webUrl: { type: 'string', description: 'The web URL of the list' },
+        createdDateTime: { type: 'string', description: 'When the list was created' },
+        lastModifiedDateTime: {
+          type: 'string',
+          description: 'When the list was last modified',
+        },
+        list: { type: 'object', description: 'List properties (e.g., template)' },
+        columns: {
+          type: 'array',
+          description: 'List column definitions',
+          items: { type: 'object' },
+        },
+        items: {
+          type: 'array',
+          description: 'List items (with fields when expanded)',
+          items: {
+            type: 'object',
+            properties: {
+              id: { type: 'string', description: 'Item ID' },
+              fields: { type: 'object', description: 'Field values for the item' },
+            },
+          },
+        },
+      },
+    },
+    lists: {
+      type: 'array',
+      description: 'All lists in the site when no listId is provided',
+      items: { type: 'object' },
+    },
+  },
+}
diff --git a/apps/sim/tools/sharepoint/index.ts b/apps/sim/tools/sharepoint/index.ts
index 702d29aec9..fbfcd80a3f 100644
--- a/apps/sim/tools/sharepoint/index.ts
+++ b/apps/sim/tools/sharepoint/index.ts
@@ -1,7 +1,13 @@
+import { createListTool } from '@/tools/sharepoint/create_list'
 import { createPageTool } from '@/tools/sharepoint/create_page'
+import { getListTool } from '@/tools/sharepoint/get_list'
 import { listSitesTool } from '@/tools/sharepoint/list_sites'
 import { readPageTool } from '@/tools/sharepoint/read_page'
+import { updateListItemTool } from '@/tools/sharepoint/update_list'
 
 export const sharepointCreatePageTool = createPageTool
+export const sharepointCreateListTool = createListTool
+export const sharepointGetListTool = getListTool
 export const sharepointListSitesTool = listSitesTool
 export const sharepointReadPageTool = readPageTool
+export const sharepointUpdateListItemTool = updateListItemTool
diff --git a/apps/sim/tools/sharepoint/types.ts b/apps/sim/tools/sharepoint/types.ts
index 6ecddf4ff4..4ce4847c05 100644
--- a/apps/sim/tools/sharepoint/types.ts
+++ b/apps/sim/tools/sharepoint/types.ts
@@ -58,6 +58,39 @@ export interface SharepointPageContent {
   } | null
 }
 
+export interface SharepointColumn {
+  id?: string
+  name?: string
+  displayName?: string
+  description?: string
+  indexed?: boolean
+  enforcedUniqueValues?: boolean
+  hidden?: boolean
+  readOnly?: boolean
+  required?: boolean
+  columnGroup?: string
+  [key: string]: unknown
+}
+
+export interface SharepointListItem {
+  id: string
+  fields?: Record<string, unknown>
+}
+
+export interface SharepointList {
+  id: string
+  displayName?: string
+  name?: string
+  webUrl?: string
+  createdDateTime?: string
+  lastModifiedDateTime?: string
+  list?: {
+    template?: string
+  }
+  columns?: SharepointColumn[]
+  items?: SharepointListItem[]
+}
+
 export interface SharepointListSitesResponse extends ToolResponse {
   output: {
     sites: SharepointSite[]
@@ -131,6 +164,18 @@ export interface SharepointToolParams {
   serverRelativePath?: string
   groupId?: string
   maxPages?: number
+  // Lists
+  listId?: string
+  listTitle?: string
+  includeColumns?: boolean
+  includeItems?: boolean
+  // Create List
+  listDisplayName?: string
+  listDescription?: string
+  listTemplate?: string
+  // Update List Item
+  itemId?: string
+  listItemFields?: Record<string, unknown>
 }
 
 export interface GraphApiResponse {
@@ -211,3 +256,29 @@ export type SharepointResponse =
   | SharepointCreatePageResponse
   | SharepointReadPageResponse
   | SharepointReadSiteResponse
+  | SharepointGetListResponse
+  | SharepointCreateListResponse
+  | SharepointUpdateListItemResponse
+
+export interface SharepointGetListResponse extends ToolResponse {
+  output: {
+    list?: SharepointList
+    lists?: SharepointList[]
+    nextPageToken?: string
+  }
+}
+
+export interface SharepointCreateListResponse extends ToolResponse {
+  output: {
+    list: SharepointList
+  }
+}
+
+export interface SharepointUpdateListItemResponse extends ToolResponse {
+  output: {
+    item: {
+      id: string
+      fields?: Record<string, unknown>
+    }
+  }
+}
diff --git a/apps/sim/tools/sharepoint/update_list.ts b/apps/sim/tools/sharepoint/update_list.ts
new file mode 100644
index 0000000000..c9b66513ac
--- /dev/null
+++ b/apps/sim/tools/sharepoint/update_list.ts
@@ -0,0 +1,169 @@
+import { createLogger } from '@/lib/logs/console/logger'
+import type {
+  SharepointToolParams,
+  SharepointUpdateListItemResponse,
+} from '@/tools/sharepoint/types'
+import type { ToolConfig } from '@/tools/types'
+
+const logger = createLogger('SharePointUpdateListItem')
+
+export const updateListItemTool: ToolConfig<
+  SharepointToolParams,
+  SharepointUpdateListItemResponse
+> = {
+  id: 'sharepoint_update_list',
+  name: 'Update SharePoint List Item',
+  description: 'Update the properties (fields) on a SharePoint list item',
+  version: '1.0',
+
+  oauth: {
+    required: true,
+    provider: 'sharepoint',
+    additionalScopes: ['openid', 'profile', 'email', 'Sites.ReadWrite.All', 'offline_access'],
+  },
+
+  params: {
+    accessToken: {
+      type: 'string',
+      required: true,
+      visibility: 'hidden',
+      description: 'The access token for the SharePoint API',
+    },
+    siteSelector: {
+      type: 'string',
+      required: false,
+      visibility: 'user-only',
+      description: 'Select the SharePoint site',
+    },
+    siteId: {
+      type: 'string',
+      required: false,
+      visibility: 'hidden',
+      description: 'The ID of the SharePoint site (internal use)',
+    },
+    listId: {
+      type: 'string',
+      required: false,
+      visibility: 'user-only',
+      description: 'The ID of the list containing the item',
+    },
+    itemId: {
+      type: 'string',
+      required: true,
+      visibility: 'user-only',
+      description: 'The ID of the list item to update',
+    },
+    listItemFields: {
+      type: 'object',
+      required: true,
+      visibility: 'user-only',
+      description: 'Field values to update on the list item',
+    },
+  },
+
+  request: {
+    url: (params) => {
+      const siteId = params.siteId || params.siteSelector || 'root'
+      if (!params.itemId) throw new Error('itemId is required')
+      if (!params.listId) {
+        throw new Error('listId must be provided')
+      }
+      const listSegment = params.listId
+      return `https://graph.microsoft.com/v1.0/sites/${siteId}/lists/${listSegment}/items/${params.itemId}/fields`
+    },
+    method: 'PATCH',
+    headers: (params) => ({
+      Authorization: `Bearer ${params.accessToken}`,
+      'Content-Type': 'application/json',
+      Accept: 'application/json',
+    }),
+    body: (params) => {
+      if (!params.listItemFields || Object.keys(params.listItemFields).length === 0) {
+        throw new Error('listItemFields must not be empty')
+      }
+
+      // Filter out system/read-only fields that cannot be updated via Graph
+      const readOnlyFields = new Set([
+        'Id',
+        'id',
+        'UniqueId',
+        'GUID',
+        'ContentTypeId',
+        'Created',
+        'Modified',
+        'Author',
+        'Editor',
+        'CreatedBy',
+        'ModifiedBy',
+        'AuthorId',
+        'EditorId',
+        '_UIVersionString',
+        'Attachments',
+        'FileRef',
+        'FileDirRef',
+        'FileLeafRef',
+      ])
+
+      const entries = Object.entries(params.listItemFields)
+      const updatableEntries = entries.filter(([key]) => !readOnlyFields.has(key))
+
+      if (updatableEntries.length !== entries.length) {
+        const removed = entries.filter(([key]) => readOnlyFields.has(key)).map(([key]) => key)
+        logger.warn('Removed read-only SharePoint fields from update', {
+          removed,
+        })
+      }
+
+      if (updatableEntries.length === 0) {
+        const requestedKeys = Object.keys(params.listItemFields)
+        throw new Error(
+          `All provided fields are read-only and cannot be updated: ${requestedKeys.join(', ')}`
+        )
+      }
+
+      const sanitizedFields = Object.fromEntries(updatableEntries)
+
+      logger.info('Updating SharePoint list item fields', {
+        listItemId: params.itemId,
+        listId: params.listId,
+        fieldsKeys: Object.keys(sanitizedFields),
+      })
+      return sanitizedFields
+    },
+  },
+
+  transformResponse: async (response: Response, params) => {
+    let fields: Record<string, unknown> | undefined
+    if (response.status !== 204) {
+      try {
+        fields = await response.json()
+      } catch {
+        // Fall back to submitted fields if no body is returned
+        fields = params?.listItemFields
+      }
+    } else {
+      fields = params?.listItemFields
+    }
+
+    return {
+      success: true,
+      output: {
+        item: {
+          id: params?.itemId!,
+          fields,
+        },
+      },
+    }
+  },
+
+  outputs: {
+    item: {
+      type: 'object',
+      description: 'Updated SharePoint list item',
+      properties: {
+        id: { type: 'string', description: 'Item ID' },
+        fields: { type: 'object', description: 'Updated field values' },
+      },
+    },
+  },
+}
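
Usage sketch (not part of the patch): the snippet below only mirrors what a tool executor might do with the new sharepoint_create_list config, driving its url/headers/body callbacks with plain fetch. The token, site, and column values are placeholder assumptions, and the optional-call syntax is used only so the sketch does not depend on the exact ToolConfig typing.

import { createListTool } from '@/tools/sharepoint/create_list'

async function createExampleList(accessToken: string) {
  // Placeholder params; siteSelector 'root' targets the tenant root site.
  const params = {
    accessToken,
    siteSelector: 'root',
    listDisplayName: 'Project Tasks',
    listDescription: 'Tasks tracked per project',
    listTemplate: 'genericList',
    // Optional columns, using the { columns: [...] } shape accepted by pageContent.
    pageContent: JSON.stringify({ columns: [{ name: 'Status', text: {} }] }),
  }

  // Compose the Graph request from the ToolConfig callbacks added in this patch.
  const response = await fetch(createListTool.request.url(params), {
    method: createListTool.request.method,
    headers: createListTool.request.headers(params),
    body: JSON.stringify(createListTool.request.body?.(params)),
  })

  // transformResponse wraps the Graph payload into { success, output: { list } }.
  return createListTool.transformResponse?.(response)
}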
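
In the same spirit, a small sketch of how sharepoint_update_list sanitizes fields before the PATCH request; the listId, itemId, and field values are placeholders:

import { updateListItemTool } from '@/tools/sharepoint/update_list'

const updateParams = {
  accessToken: '<token>',
  siteId: 'root',
  listId: '<list-guid>',
  itemId: '42',
  // 'Modified' is read-only and gets dropped by body() with a logged warning;
  // only Title and Status are sent to the /fields endpoint.
  listItemFields: { Title: 'Updated title', Status: 'Done', Modified: '2025-01-01T00:00:00Z' },
}

const fieldsPayload = updateListItemTool.request.body?.(updateParams)
// => { Title: 'Updated title', Status: 'Done' }
const targetUrl = updateListItemTool.request.url(updateParams)
// => https://graph.microsoft.com/v1.0/sites/root/lists/<list-guid>/items/42/fields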