diff --git a/apps/webapp/app/components/logs/LogDetailView.tsx b/apps/webapp/app/components/logs/LogDetailView.tsx index 6b3a76b8a8..a5cbb15aeb 100644 --- a/apps/webapp/app/components/logs/LogDetailView.tsx +++ b/apps/webapp/app/components/logs/LogDetailView.tsx @@ -234,7 +234,7 @@ function DetailsTab({ log, runPath, searchTerm }: { log: LogEntry; runPath: stri
Timestamp
- +
diff --git a/apps/webapp/app/components/logs/LogsTable.tsx b/apps/webapp/app/components/logs/LogsTable.tsx index a361d95c5e..2f6894a03e 100644 --- a/apps/webapp/app/components/logs/LogsTable.tsx +++ b/apps/webapp/app/components/logs/LogsTable.tsx @@ -162,7 +162,7 @@ export function LogsTable({ boxShadow: getLevelBoxShadow(log.level), }} > - + diff --git a/apps/webapp/app/presenters/v3/LogDetailPresenter.server.ts b/apps/webapp/app/presenters/v3/LogDetailPresenter.server.ts index e23e2f552c..d354018b82 100644 --- a/apps/webapp/app/presenters/v3/LogDetailPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogDetailPresenter.server.ts @@ -81,12 +81,17 @@ export class LogDetailPresenter { // Ignore parse errors } + const durationMs = (typeof log.duration === "number" ? log.duration : Number(log.duration)) / 1_000_000; + return { // Use :: separator to match LogsListPresenter format id: `${log.trace_id}::${log.span_id}::${log.run_id}::${log.start_time}`, runId: log.run_id, taskIdentifier: log.task_identifier, startTime: convertClickhouseDateTime64ToJsDate(log.start_time).toISOString(), + triggeredTimestamp: new Date( + convertClickhouseDateTime64ToJsDate(log.start_time).getTime() + durationMs + ).toISOString(), traceId: log.trace_id, spanId: log.span_id, parentSpanId: log.parent_span_id || null, diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index b1c03f8b74..545bca5cce 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -50,7 +50,6 @@ export type LogsListOptions = { retentionLimitDays?: number; // search search?: string; - includeDebugLogs?: boolean; // pagination direction?: Direction; cursor?: string; @@ -69,7 +68,6 @@ export const LogsListOptionsSchema = z.object({ defaultPeriod: z.string().optional(), retentionLimitDays: z.number().int().positive().optional(), search: 
z.string().max(1000).optional(), - includeDebugLogs: z.boolean().optional(), direction: z.enum(["forward", "backward"]).optional(), cursor: z.string().optional(), pageSize: z.number().int().positive().max(1000).optional(), @@ -83,14 +81,16 @@ export type LogsListAppliedFilters = LogsList["filters"]; // Cursor is a base64 encoded JSON of the pagination keys type LogCursor = { + organizationId: string; environmentId: string; - unixTimestamp: number; + triggeredTimestamp: string; // DateTime64(9) string traceId: string; }; const LogCursorSchema = z.object({ + organizationId: z.string(), environmentId: z.string(), - unixTimestamp: z.number(), + triggeredTimestamp: z.string(), traceId: z.string(), }); @@ -116,33 +116,16 @@ function decodeCursor(cursor: string): LogCursor | null { function levelToKindsAndStatuses(level: LogLevel): { kinds?: string[]; statuses?: string[] } { switch (level) { case "DEBUG": - return { kinds: ["DEBUG_EVENT", "LOG_DEBUG"] }; + return { kinds: ["LOG_DEBUG"] }; case "INFO": - return { kinds: ["LOG_INFO", "LOG_LOG"] }; + return { kinds: ["LOG_INFO", "LOG_LOG", "SPAN"] }; case "WARN": return { kinds: ["LOG_WARN"] }; case "ERROR": - return { kinds: ["LOG_ERROR"], statuses: ["ERROR"] }; + return { kinds: ["LOG_ERROR", "SPAN_EVENT"], statuses: ["ERROR"] }; } } -function convertDateToNanoseconds(date: Date): bigint { - return BigInt(date.getTime()) * 1_000_000n; -} - -function formatNanosecondsForClickhouse(ns: bigint): string { - const nsString = ns.toString(); - // Handle negative numbers (dates before 1970-01-01) - if (nsString.startsWith("-")) { - const absString = nsString.slice(1); - const padded = absString.padStart(19, "0"); - return "-" + padded.slice(0, 10) + "." + padded.slice(10); - } - // Pad positive numbers to 19 digits to ensure correct slicing - const padded = nsString.padStart(19, "0"); - return padded.slice(0, 10) + "." 
+ padded.slice(10); -} - export class LogsListPresenter extends BasePresenter { constructor( private readonly replica: PrismaClientOrTransaction, @@ -166,7 +149,6 @@ export class LogsListPresenter extends BasePresenter { to, cursor, pageSize = DEFAULT_PAGE_SIZE, - includeDebugLogs = true, defaultPeriod, retentionLimitDays, }: LogsListOptions @@ -252,7 +234,7 @@ export class LogsListPresenter extends BasePresenter { ); } - const queryBuilder = this.clickhouse.taskEventsV2.logsListQueryBuilder(); + const queryBuilder = this.clickhouse.taskEventsSearch.logsListQueryBuilder(); queryBuilder.where("environment_id = {environmentId: String}", { environmentId, @@ -265,27 +247,16 @@ export class LogsListPresenter extends BasePresenter { if (effectiveFrom) { - const fromNs = convertDateToNanoseconds(effectiveFrom); - - queryBuilder.where("inserted_at >= {insertedAtStart: DateTime64(3)}", { - insertedAtStart: convertDateToClickhouseDateTime(effectiveFrom), + queryBuilder.where("triggered_timestamp >= {triggeredAtStart: DateTime64(3)}", { + triggeredAtStart: convertDateToClickhouseDateTime(effectiveFrom), }); - - queryBuilder.where("start_time >= {fromTime: String}", { - fromTime: formatNanosecondsForClickhouse(fromNs), - }); } if (effectiveTo) { const clampedTo = effectiveTo > new Date() ? 
new Date() : effectiveTo; - const toNs = convertDateToNanoseconds(clampedTo); - - queryBuilder.where("inserted_at <= {insertedAtEnd: DateTime64(3)}", { - insertedAtEnd: convertDateToClickhouseDateTime(clampedTo), - }); - queryBuilder.where("start_time <= {toTime: String}", { - toTime: formatNanosecondsForClickhouse(toNs), + queryBuilder.where("triggered_timestamp <= {triggeredAtEnd: DateTime64(3)}", { + triggeredAtEnd: convertDateToClickhouseDateTime(clampedTo), }); } @@ -349,39 +320,20 @@ export class LogsListPresenter extends BasePresenter { } } - // Debug logs are available only to admins - if (includeDebugLogs === false) { - queryBuilder.where("kind NOT IN {debugKinds: Array(String)}", { - debugKinds: ["DEBUG_EVENT"], - }); - - queryBuilder.where("NOT ((kind = 'LOG_INFO') AND (attributes_text = '{}'))"); - } - - queryBuilder.where("kind NOT IN {debugSpans: Array(String)}", { - debugSpans: ["SPAN", "ANCESTOR_OVERRIDE", "SPAN_EVENT"], - }); - - // kindCondition += ` `; - // params["excluded_statuses"] = ["SPAN", "ANCESTOR_OVERRIDE", "SPAN_EVENT"]; - - - queryBuilder.where("NOT (kind = 'SPAN' AND status = 'PARTIAL')"); - - // Cursor pagination + // Cursor pagination using explicit lexicographic comparison + // Must mirror the ORDER BY columns: (organization_id, environment_id, triggered_timestamp, trace_id) const decodedCursor = cursor ? 
decodeCursor(cursor) : null; if (decodedCursor) { queryBuilder.where( - "(environment_id, toUnixTimestamp(start_time), trace_id) < ({cursorEnvId: String}, {cursorUnixTimestamp: Int64}, {cursorTraceId: String})", + `(triggered_timestamp < {cursorTriggeredTimestamp: String} OR (triggered_timestamp = {cursorTriggeredTimestamp: String} AND trace_id < {cursorTraceId: String}))`, { - cursorEnvId: decodedCursor.environmentId, - cursorUnixTimestamp: decodedCursor.unixTimestamp, + cursorTriggeredTimestamp: decodedCursor.triggeredTimestamp, cursorTraceId: decodedCursor.traceId, } ); } - queryBuilder.orderBy("environment_id DESC, toUnixTimestamp(start_time) DESC, trace_id DESC"); + queryBuilder.orderBy("triggered_timestamp DESC, trace_id DESC"); // Limit + 1 to check if there are more results queryBuilder.limit(pageSize + 1); @@ -399,10 +351,10 @@ export class LogsListPresenter extends BasePresenter { let nextCursor: string | undefined; if (hasMore && logs.length > 0) { const lastLog = logs[logs.length - 1]; - const unixTimestamp = Math.floor(new Date(lastLog.start_time).getTime() / 1000); nextCursor = encodeCursor({ + organizationId, environmentId, - unixTimestamp, + triggeredTimestamp: lastLog.triggered_timestamp, traceId: lastLog.trace_id, }); } @@ -430,6 +382,9 @@ export class LogsListPresenter extends BasePresenter { runId: log.run_id, taskIdentifier: log.task_identifier, startTime: convertClickhouseDateTime64ToJsDate(log.start_time).toISOString(), + triggeredTimestamp: convertClickhouseDateTime64ToJsDate( + log.triggered_timestamp + ).toISOString(), traceId: log.trace_id, spanId: log.span_id, parentSpanId: log.parent_span_id || null, diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx index 84dbc2deda..9c2eccc8ea 100644 --- 
a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx @@ -36,7 +36,6 @@ import { ResizablePanel, ResizablePanelGroup, } from "~/components/primitives/Resizable"; -import { Switch } from "~/components/primitives/Switch"; import { Button } from "~/components/primitives/Buttons"; import { FEATURE_FLAG, validateFeatureFlagValue } from "~/v3/featureFlags.server"; @@ -95,7 +94,6 @@ async function hasLogsPageAccess( export const loader = async ({ request, params }: LoaderFunctionArgs) => { const user = await requireUser(request); const userId = user.id; - const isAdmin = user.admin || user.isImpersonating; const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params); @@ -126,7 +124,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { const runId = url.searchParams.get("runId") ?? undefined; const search = url.searchParams.get("search") ?? undefined; const levels = parseLevelsFromUrl(url); - const showDebug = url.searchParams.get("showDebug") === "true"; const period = url.searchParams.get("period") ?? undefined; const fromStr = url.searchParams.get("from"); const toStr = url.searchParams.get("to"); @@ -150,7 +147,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { period, from, to, - includeDebugLogs: isAdmin && showDebug, defaultPeriod: "1h", retentionLimitDays }) @@ -163,15 +159,13 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { return typeddefer({ data: listPromise, - isAdmin, - showDebug, defaultPeriod: "1h", retentionLimitDays, }); }; export default function Page() { - const { data, isAdmin, showDebug, defaultPeriod, retentionLimitDays } = + const { data, defaultPeriod, retentionLimitDays } = useTypedLoaderData(); return ( @@ -199,8 +193,6 @@ export default function Page() { errorElement={
@@ -218,8 +210,6 @@ export default function Page() { return (
@@ -235,15 +225,11 @@ export default function Page() {
@@ -258,14 +244,10 @@ export default function Page() { function FiltersBar({ list, - isAdmin, - showDebug, defaultPeriod, retentionLimitDays, }: { list?: Exclude["data"]>, { error: string }>; - isAdmin: boolean; - showDebug: boolean; defaultPeriod?: string; retentionLimitDays: number; }) { @@ -280,16 +262,6 @@ function FiltersBar({ searchParams.has("from") || searchParams.has("to"); - const handleDebugToggle = useCallback((checked: boolean) => { - const url = new URL(window.location.href); - if (checked) { - url.searchParams.set("showDebug", "true"); - } else { - url.searchParams.delete("showDebug"); - } - window.location.href = url.toString(); - }, []); - return (
@@ -329,16 +301,6 @@ function FiltersBar({ )}
-
- {isAdmin && ( - - )} -
); } @@ -347,8 +309,6 @@ function LogsList({ list, }: { list: Exclude["data"]>, { error: string }>; //exclude error, it is handled - isAdmin: boolean; - showDebug: boolean; defaultPeriod?: string; }) { const navigation = useNavigation(); diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx deleted file mode 100644 index 53bc655a02..0000000000 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx +++ /dev/null @@ -1,100 +0,0 @@ -import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; -import { json } from "@remix-run/node"; -import { requireUserId } from "~/services/session.server"; -import { EnvironmentParamSchema } from "~/utils/pathBuilder"; -import { findProjectBySlug } from "~/models/project.server"; -import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; -import { clickhouseClient } from "~/services/clickhouseInstance.server"; - -// Convert ClickHouse kind to display level -function kindToLevel( - kind: string -): "TRACE" | "DEBUG" | "INFO" | "WARN" | "ERROR" | "LOG" { - switch (kind) { - case "DEBUG_EVENT": - case "LOG_DEBUG": - return "DEBUG"; - case "LOG_INFO": - return "INFO"; - case "LOG_WARN": - return "WARN"; - case "LOG_ERROR": - return "ERROR"; - case "LOG_LOG": - return "LOG"; - case "SPAN": - case "ANCESTOR_OVERRIDE": - case "SPAN_EVENT": - default: - return "TRACE"; - } -} - -// Fetch related spans for a log entry from the same trace -export const loader = async ({ request, params }: LoaderFunctionArgs) => { - const userId = await requireUserId(request); - const { projectParam, organizationSlug, envParam, logId } = { - ...EnvironmentParamSchema.parse(params), - logId: params.logId, - }; - - if (!logId) { - throw new Response("Log ID is required", { status: 400 
}); - } - - const project = await findProjectBySlug(organizationSlug, projectParam, userId); - if (!project) { - throw new Response("Project not found", { status: 404 }); - } - - const environment = await findEnvironmentBySlug(project.id, envParam, userId); - if (!environment) { - throw new Response("Environment not found", { status: 404 }); - } - - // Get trace ID and run ID from query params - const url = new URL(request.url); - const traceId = url.searchParams.get("traceId"); - const runId = url.searchParams.get("runId"); - const currentSpanId = url.searchParams.get("spanId"); - - if (!traceId || !runId) { - throw new Response("Trace ID and Run ID are required", { status: 400 }); - } - - // Query ClickHouse for related spans in the same trace - const queryBuilder = clickhouseClient.taskEventsV2.logsListQueryBuilder(); - - queryBuilder.where("environment_id = {environmentId: String}", { - environmentId: environment.id, - }); - queryBuilder.where("trace_id = {traceId: String}", { traceId }); - queryBuilder.where("run_id = {runId: String}", { runId }); - - // Order by start time to show spans in chronological order - queryBuilder.orderBy("start_time ASC"); - queryBuilder.limit(50); - - const [queryError, records] = await queryBuilder.execute(); - - if (queryError) { - throw queryError; - } - - const results = records || []; - - const spans = results.map((row) => ({ - id: `${row.trace_id}::${row.span_id}::${row.run_id}::${row.start_time}`, - spanId: row.span_id, - parentSpanId: row.parent_span_id || null, - message: row.message.substring(0, 200), // Truncate for list view - kind: row.kind, - level: kindToLevel(row.kind), - status: row.status, - startTime: new Date(Number(row.start_time) / 1_000_000).toISOString(), - duration: Number(row.duration), - isCurrent: row.span_id === currentSpanId, - })); - - return json({ spans }); -}; diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts 
b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts index 656e20472e..cac4c0f702 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts @@ -21,7 +21,6 @@ function parseLevelsFromUrl(url: URL): LogLevel[] | undefined { export const loader = async ({ request, params }: LoaderFunctionArgs) => { const user = await requireUser(request); const userId = user.id; - const isAdmin = user?.admin || user?.isImpersonating; const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params); @@ -46,7 +45,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { const search = url.searchParams.get("search") ?? undefined; const cursor = url.searchParams.get("cursor") ?? undefined; const levels = parseLevelsFromUrl(url); - const showDebug = url.searchParams.get("showDebug") === "true"; const period = url.searchParams.get("period") ?? 
undefined; const fromStr = url.searchParams.get("from"); const toStr = url.searchParams.get("to"); @@ -67,7 +65,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { from, to, levels, - includeDebugLogs: isAdmin && showDebug, defaultPeriod: "1h", retentionLimitDays, }) as any; // Validated by LogsListOptionsSchema at runtime diff --git a/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql new file mode 100644 index 0000000000..60ed1eb1fc --- /dev/null +++ b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql @@ -0,0 +1,63 @@ +-- +goose Up +CREATE TABLE IF NOT EXISTS trigger_dev.task_events_search_v1 +( + environment_id String, + organization_id String, + project_id String, + triggered_timestamp DateTime64(9) CODEC(Delta(8), ZSTD(1)), + trace_id String CODEC(ZSTD(1)), + span_id String CODEC(ZSTD(1)), + run_id String CODEC(ZSTD(1)), + task_identifier String CODEC(ZSTD(1)), + start_time DateTime64(9) CODEC(Delta(8), ZSTD(1)), + inserted_at DateTime64(3), + message String CODEC(ZSTD(1)), + kind LowCardinality(String) CODEC(ZSTD(1)), + status LowCardinality(String) CODEC(ZSTD(1)), + duration UInt64 CODEC(ZSTD(1)), + parent_span_id String CODEC(ZSTD(1)), + attributes_text String CODEC(ZSTD(1)), + + INDEX idx_run_id run_id TYPE bloom_filter(0.001) GRANULARITY 1, + INDEX idx_message_text_search lower(message) TYPE ngrambf_v1(3, 32768, 2, 0) GRANULARITY 1, + INDEX idx_attributes_text_search lower(attributes_text) TYPE ngrambf_v1(3, 32768, 2, 0) GRANULARITY 1 +) +ENGINE = MergeTree +PARTITION BY toDate(triggered_timestamp) +ORDER BY (organization_id, environment_id, triggered_timestamp, trace_id) +--Right now we have maximum retention of up to 30 days based on plan. 
+--We put a logical limit for now, the 90 DAY TTL is just a backup +--This might need to be updated for longer retention periods +TTL toDateTime(triggered_timestamp) + INTERVAL 90 DAY +SETTINGS ttl_only_drop_parts = 1; + +CREATE MATERIALIZED VIEW IF NOT EXISTS trigger_dev.task_events_search_mv_v1 +TO trigger_dev.task_events_search_v1 AS +SELECT + environment_id, + organization_id, + project_id, + trace_id, + span_id, + run_id, + task_identifier, + start_time, + inserted_at, + message, + kind, + status, + duration, + parent_span_id, + attributes_text, + fromUnixTimestamp64Nano(toUnixTimestamp64Nano(start_time) + toInt64(duration)) AS triggered_timestamp +FROM trigger_dev.task_events_v2 +WHERE + kind != 'DEBUG_EVENT' + AND status != 'PARTIAL' + AND NOT (kind = 'SPAN_EVENT' AND attributes_text = '{}') + AND kind != 'ANCESTOR_OVERRIDE' + AND message != 'trigger.dev/start'; + +-- +goose Down +DROP VIEW IF EXISTS trigger_dev.task_events_search_mv_v1; +DROP TABLE IF EXISTS trigger_dev.task_events_search_v1; diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts index 50b39d35a7..47c2f34f2f 100644 --- a/internal-packages/clickhouse/src/index.ts +++ b/internal-packages/clickhouse/src/index.ts @@ -23,8 +23,8 @@ import { getTraceSummaryQueryBuilderV2, insertTaskEvents, insertTaskEventsV2, - getLogsListQueryBuilderV2, getLogDetailQueryBuilderV2, + getLogsSearchListQueryBuilder, } from "./taskEvents.js"; import { Logger, type LogLevel } from "@trigger.dev/core/logger"; import type { Agent as HttpAgent } from "http"; @@ -220,8 +220,13 @@ export class ClickHouse { traceSummaryQueryBuilder: getTraceSummaryQueryBuilderV2(this.reader), traceDetailedSummaryQueryBuilder: getTraceDetailedSummaryQueryBuilderV2(this.reader), spanDetailsQueryBuilder: getSpanDetailsQueryBuilderV2(this.reader), - logsListQueryBuilder: getLogsListQueryBuilderV2(this.reader, this.logsQuerySettings?.list), logDetailQueryBuilder: 
getLogDetailQueryBuilderV2(this.reader, this.logsQuerySettings?.detail), }; } + + get taskEventsSearch() { + return { + logsListQueryBuilder: getLogsSearchListQueryBuilder(this.reader, this.logsQuerySettings?.list), + }; + } } diff --git a/internal-packages/clickhouse/src/taskEvents.ts b/internal-packages/clickhouse/src/taskEvents.ts index 890eab9cc7..73e2d8344e 100644 --- a/internal-packages/clickhouse/src/taskEvents.ts +++ b/internal-packages/clickhouse/src/taskEvents.ts @@ -231,11 +231,12 @@ export function getSpanDetailsQueryBuilderV2( }); } + // ============================================================================ -// Logs List Query Builders (for aggregated logs page) +// Search Table Query Builders (for logs page, using task_events_search_v1) // ============================================================================ -export const LogsListResult = z.object({ +export const LogsSearchListResult = z.object({ environment_id: z.string(), organization_id: z.string(), project_id: z.string(), @@ -250,14 +251,18 @@ export const LogsListResult = z.object({ status: z.string(), duration: z.number().or(z.string()), attributes_text: z.string(), + triggered_timestamp: z.string(), }); -export type LogsListResult = z.output; +export type LogsSearchListResult = z.output; -export function getLogsListQueryBuilderV2(ch: ClickhouseReader, settings?: ClickHouseSettings) { - return ch.queryBuilderFast({ - name: "getLogsList", - table: "trigger_dev.task_events_v2", +export function getLogsSearchListQueryBuilder( + ch: ClickhouseReader, + settings?: ClickHouseSettings +) { + return ch.queryBuilderFast({ + name: "getLogsSearchList", + table: "trigger_dev.task_events_search_v1", columns: [ "environment_id", "organization_id", @@ -272,7 +277,8 @@ export function getLogsListQueryBuilderV2(ch: ClickhouseReader, settings?: Click "kind", "status", "duration", - "attributes_text" + "attributes_text", + "triggered_timestamp", ], settings, }); diff --git 
a/references/seed/src/trigger/spanSpammer.ts b/references/seed/src/trigger/spanSpammer.ts index b16f00c4c2..dc79ce93bf 100644 --- a/references/seed/src/trigger/spanSpammer.ts +++ b/references/seed/src/trigger/spanSpammer.ts @@ -1,4 +1,4 @@ -import { logger, task, wait } from "@trigger.dev/sdk/v3"; +import { logger, task, wait, metadata } from "@trigger.dev/sdk/v3"; const CONFIG = { delayBetweenBatchesSeconds: 0.2, @@ -14,6 +14,22 @@ export const SpanSpammerTask = task({ const context = { payload, ctx }; let logCount = 0; + // 10s trace with events every 2s + await logger.trace("10s-span", async () => { + const totalSeconds = 10; + const intervalSeconds = 2; + const totalEvents = totalSeconds / intervalSeconds; + + logger.info("Starting 10s span", context); + + for (let i = 1; i <= totalEvents; i++) { + await wait.for({ seconds: intervalSeconds }); + logger.info(`Inner event ${i}/${totalEvents} at ${i * intervalSeconds}s`, context); + } + + logger.info("Completed 10s span", context); + }); + logger.info("Starting span spammer task", context); logger.warn("This will generate a lot of logs", context); @@ -36,6 +52,14 @@ export const SpanSpammerTask = task({ emitBatch("This is a test log!!! Log number: "); } + metadata.parent.set("childStatus", "running"); + metadata.parent.increment("completedChildren", 1); + + // Update the root run's metadata (top-level run in the chain) + metadata.root.set("deepChildStatus", "done"); + metadata.root.append("completedTasks", "child-task"); + + logger.info("Completed span spammer task", context); return { message: `Created ${logCount} logs` }; },