From d322cec7eeff0d2573713ad7dc20c6995d90baef Mon Sep 17 00:00:00 2001 From: mpcgird Date: Thu, 12 Feb 2026 18:17:56 +0200 Subject: [PATCH 01/10] * New ClickHouse table & MV (task_events_search_v1): A search-optimized materialized view that filters out debug events, partial spans, and empty span events at ingestion time. * ClickHouse client updates: New getLogsSearchListQueryBuilder and taskEventsSearch accessor on the ClickHouse class. * LogsListPresenter: Switches to the new search table, uses triggered_timestamp for cursor pagination instead of unixTimestamp. * Spans route: Also switches to the new search query builder. * Seed spanSpammer: Adds a 10s trace with events and metadata operations for testing. --- .../presenters/v3/LogsListPresenter.server.ts | 39 +++++------- ...tParam.env.$envParam.logs.$logId.spans.tsx | 2 +- .../schema/016_add_task_events_search_v1.sql | 62 +++++++++++++++++++ internal-packages/clickhouse/src/index.ts | 9 ++- .../clickhouse/src/taskEvents.ts | 24 ++++--- references/seed/src/trigger/spanSpammer.ts | 26 +++++++- 6 files changed, 125 insertions(+), 37 deletions(-) create mode 100644 internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index b1c03f8b74..d751e6e960 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -84,13 +84,13 @@ export type LogsListAppliedFilters = LogsList["filters"]; // Cursor is a base64 encoded JSON of the pagination keys type LogCursor = { environmentId: string; - unixTimestamp: number; + triggeredTimestamp: string; // DateTime64(9) string traceId: string; }; const LogCursorSchema = z.object({ environmentId: z.string(), - unixTimestamp: z.number(), + triggeredTimestamp: z.string(), traceId: z.string(), }); @@ -252,7 +252,7 @@ export class LogsListPresenter extends BasePresenter { ); } - const queryBuilder = this.clickhouse.taskEventsV2.logsListQueryBuilder(); + const queryBuilder = this.clickhouse.taskEventsSearch.logsListQueryBuilder(); queryBuilder.where("environment_id = {environmentId: String}", { environmentId, @@ -350,38 +350,30 @@ export class LogsListPresenter extends BasePresenter { } // Debug logs are available only to admins - if (includeDebugLogs === false) { - queryBuilder.where("kind NOT IN {debugKinds: Array(String)}", { - debugKinds: ["DEBUG_EVENT"], - }); - - queryBuilder.where("NOT ((kind = 'LOG_INFO') AND (attributes_text = '{}'))"); - } - - queryBuilder.where("kind NOT IN {debugSpans: Array(String)}", { - debugSpans: ["SPAN", "ANCESTOR_OVERRIDE", "SPAN_EVENT"], - }); - - // kindCondition += ` `; - // params["excluded_statuses"] = ["SPAN", "ANCESTOR_OVERRIDE", "SPAN_EVENT"]; - + // if (includeDebugLogs === false) { + // queryBuilder.where("kind NOT IN {debugKinds: Array(String)}", { + // debugKinds: ["DEBUG_EVENT"], + // }); + // + // queryBuilder.where("NOT ((kind = 'LOG_INFO') AND (attributes_text = '{}'))"); + // } - queryBuilder.where("NOT (kind = 'SPAN' AND status = 'PARTIAL')"); + // SPAN, ANCESTOR_OVERRIDE, SPAN_EVENT kinds are already filtered out by the materialized view // Cursor pagination const decodedCursor = cursor ? 
decodeCursor(cursor) : null; if (decodedCursor) { queryBuilder.where( - "(environment_id, toUnixTimestamp(start_time), trace_id) < ({cursorEnvId: String}, {cursorUnixTimestamp: Int64}, {cursorTraceId: String})", + "(environment_id, triggered_timestamp, trace_id) < ({cursorEnvId: String}, {cursorTriggeredTimestamp: String}, {cursorTraceId: String})", { cursorEnvId: decodedCursor.environmentId, - cursorUnixTimestamp: decodedCursor.unixTimestamp, + cursorTriggeredTimestamp: decodedCursor.triggeredTimestamp, cursorTraceId: decodedCursor.traceId, } ); } - queryBuilder.orderBy("environment_id DESC, toUnixTimestamp(start_time) DESC, trace_id DESC"); + queryBuilder.orderBy("environment_id DESC, triggered_timestamp DESC, trace_id DESC"); // Limit + 1 to check if there are more results queryBuilder.limit(pageSize + 1); @@ -399,10 +391,9 @@ export class LogsListPresenter extends BasePresenter { let nextCursor: string | undefined; if (hasMore && logs.length > 0) { const lastLog = logs[logs.length - 1]; - const unixTimestamp = Math.floor(new Date(lastLog.start_time).getTime() / 1000); nextCursor = encodeCursor({ environmentId, - unixTimestamp, + triggeredTimestamp: lastLog.triggered_timestamp, traceId: lastLog.trace_id, }); } diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx index 53bc655a02..2eab7492fc 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx @@ -63,7 +63,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { } // Query ClickHouse for related spans in the same trace - const queryBuilder = clickhouseClient.taskEventsV2.logsListQueryBuilder(); + const queryBuilder = clickhouseClient.taskEventsSearch.logsListQueryBuilder(); queryBuilder.where("environment_id = {environmentId: String}", { environmentId: environment.id, diff --git a/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql new file mode 100644 index 0000000000..b44a3555f0 --- /dev/null +++ b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql @@ -0,0 +1,62 @@ +-- +goose Up +CREATE TABLE IF NOT EXISTS trigger_dev.task_events_search_v1 +( + environment_id String, + organization_id String, + project_id String, + triggered_timestamp DateTime64(9) CODEC(Delta(8), ZSTD(1)), + trace_id String CODEC(ZSTD(1)), + span_id String CODEC(ZSTD(1)), + run_id String CODEC(ZSTD(1)), + task_identifier String CODEC(ZSTD(1)), + start_time DateTime64(9) CODEC(Delta(8), ZSTD(1)), + inserted_at DateTime64(3), + message String CODEC(ZSTD(1)), + kind LowCardinality(String) CODEC(ZSTD(1)), + status LowCardinality(String) CODEC(ZSTD(1)), + duration UInt64 CODEC(ZSTD(1)), + parent_span_id String CODEC(ZSTD(1)), + attributes_text String CODEC(ZSTD(1)), + + INDEX idx_run_id run_id TYPE bloom_filter(0.001) GRANULARITY 1, + INDEX idx_message_text_search lower(message) TYPE ngrambf_v1(3, 32768, 2, 0) GRANULARITY 1, + INDEX idx_attributes_text_search lower(attributes_text) TYPE ngrambf_v1(3, 32768, 2, 0) GRANULARITY 1 +) +ENGINE = MergeTree +PARTITION BY toDate(triggered_timestamp) +ORDER BY (organization_id, environment_id, triggered_timestamp) +--Right 
now we have maximum retention of up to 30 days based on plan. +--We put a logical limit for now, the 90 DAY TTL is just a backup +--This might need to be updated for longer retention periods +TTL toDateTime(triggered_timestamp) + INTERVAL 90 DAY +SETTINGS ttl_only_drop_parts = 1; + +CREATE MATERIALIZED VIEW IF NOT EXISTS trigger_dev.task_events_search_mv_v1 +TO trigger_dev.task_events_search_v1 AS +SELECT + environment_id, + organization_id, + project_id, + trace_id, + span_id, + run_id, + task_identifier, + start_time, + inserted_at, + message, + kind, + status, + duration, + parent_span_id, + toJSONString(attributes) AS attributes_text, + fromUnixTimestamp64Nano(toUnixTimestamp64Nano(start_time) + toInt64(duration)) AS triggered_timestamp +FROM trigger_dev.task_events_v2 +WHERE + kind != 'DEBUG_EVENT' + AND status != 'PARTIAL' + AND NOT (kind = 'SPAN_EVENT' AND attributes_text = '{}') + AND message != 'trigger.dev/start'; + +-- +goose Down +DROP VIEW IF EXISTS trigger_dev.task_events_search_mv_v1; +DROP TABLE IF EXISTS trigger_dev.task_events_search_v1; diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts index 50b39d35a7..47c2f34f2f 100644 --- a/internal-packages/clickhouse/src/index.ts +++ b/internal-packages/clickhouse/src/index.ts @@ -23,8 +23,8 @@ import { getTraceSummaryQueryBuilderV2, insertTaskEvents, insertTaskEventsV2, - getLogsListQueryBuilderV2, getLogDetailQueryBuilderV2, + getLogsSearchListQueryBuilder, } from "./taskEvents.js"; import { Logger, type LogLevel } from "@trigger.dev/core/logger"; import type { Agent as HttpAgent } from "http"; @@ -220,8 +220,13 @@ export class ClickHouse { traceSummaryQueryBuilder: getTraceSummaryQueryBuilderV2(this.reader), traceDetailedSummaryQueryBuilder: getTraceDetailedSummaryQueryBuilderV2(this.reader), spanDetailsQueryBuilder: getSpanDetailsQueryBuilderV2(this.reader), - logsListQueryBuilder: getLogsListQueryBuilderV2(this.reader, this.logsQuerySettings?.list), logDetailQueryBuilder: getLogDetailQueryBuilderV2(this.reader, this.logsQuerySettings?.detail), }; } + + get taskEventsSearch() { + return { + logsListQueryBuilder: getLogsSearchListQueryBuilder(this.reader, this.logsQuerySettings?.list), + }; + } } diff --git a/internal-packages/clickhouse/src/taskEvents.ts b/internal-packages/clickhouse/src/taskEvents.ts index 890eab9cc7..561de7e879 100644 --- a/internal-packages/clickhouse/src/taskEvents.ts +++ b/internal-packages/clickhouse/src/taskEvents.ts @@ -231,11 +231,12 @@ export function getSpanDetailsQueryBuilderV2( }); } + // ============================================================================ -// Logs List Query Builders (for aggregated logs page) +// Search Table Query Builders (for logs page, using task_events_search_v1) // ============================================================================ -export const LogsListResult = z.object({ +export const LogsSearchListResult = z.object({ environment_id: z.string(), organization_id: z.string(), project_id: z.string(), @@ -250,14 +251,18 @@ export const LogsListResult = z.object({ status: z.string(), duration: z.number().or(z.string()), attributes_text: z.string(), + triggered_timestamp: z.string(), }); -export type LogsListResult = z.output; +export type LogsSearchListResult = z.output; -export function getLogsListQueryBuilderV2(ch: ClickhouseReader, settings?: ClickHouseSettings) { - return ch.queryBuilderFast({ - name: "getLogsList", - table: "trigger_dev.task_events_v2", +export function getLogsSearchListQueryBuilder( + ch: 
ClickhouseReader, + settings?: ClickHouseSettings +) { + return ch.queryBuilderFast({ + name: "getLogsSearchList", + table: "trigger_dev.task_events_search_v1", columns: [ "environment_id", "organization_id", @@ -268,11 +273,12 @@ export function getLogsListQueryBuilderV2(ch: ClickhouseReader, settings?: Click "trace_id", "span_id", "parent_span_id", - { name: "message", expression: "LEFT(message, 512)" }, + { name: "message", expression: "LEFT(message, 256)" }, "kind", "status", "duration", - "attributes_text" + "attributes_text", + "triggered_timestamp", ], settings, }); diff --git a/references/seed/src/trigger/spanSpammer.ts b/references/seed/src/trigger/spanSpammer.ts index b16f00c4c2..dc79ce93bf 100644 --- a/references/seed/src/trigger/spanSpammer.ts +++ b/references/seed/src/trigger/spanSpammer.ts @@ -1,4 +1,4 @@ -import { logger, task, wait } from "@trigger.dev/sdk/v3"; +import { logger, task, wait, metadata } from "@trigger.dev/sdk/v3"; const CONFIG = { delayBetweenBatchesSeconds: 0.2, @@ -14,6 +14,22 @@ export const SpanSpammerTask = task({ const context = { payload, ctx }; let logCount = 0; + // 30s trace with events every 5s + await logger.trace("10s-span", async () => { + const totalSeconds = 10; + const intervalSeconds = 2; + const totalEvents = totalSeconds / intervalSeconds; + + logger.info("Starting 30s span", context); + + for (let i = 1; i <= totalEvents; i++) { + await wait.for({ seconds: intervalSeconds }); + logger.info(`Inner event ${i}/${totalEvents} at ${i * intervalSeconds}s`, context); + } + + logger.info("Completed 30s span", context); + }); + logger.info("Starting span spammer task", context); logger.warn("This will generate a lot of logs", context); @@ -36,6 +52,14 @@ export const SpanSpammerTask = task({ emitBatch("This is a test log!!! 
Log number: "); } + metadata.parent.set("childStatus", "running"); + metadata.parent.increment("completedChildren", 1); + + // Update the root run's metadata (top-level run in the chain) + metadata.root.set("deepChildStatus", "done"); + metadata.root.append("completedTasks", "child-task"); + + logger.info("Completed span spammer task", context); return { message: `Created ${logCount} logs` }; }, From a61414910652544ec707028086540ed717fe2449 Mon Sep 17 00:00:00 2001 From: mpcgird Date: Thu, 12 Feb 2026 19:03:00 +0200 Subject: [PATCH 02/10] Changed pagination to use span_id, not trace_id, more granular --- .../app/presenters/v3/LogsListPresenter.server.ts | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index d751e6e960..42ea27aa2e 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -85,13 +85,13 @@ export type LogsListAppliedFilters = LogsList["filters"]; type LogCursor = { environmentId: string; triggeredTimestamp: string; // DateTime64(9) string - traceId: string; + spanId: string; }; const LogCursorSchema = z.object({ environmentId: z.string(), triggeredTimestamp: z.string(), - traceId: z.string(), + spanId: z.string(), }); function encodeCursor(cursor: LogCursor): string { @@ -360,20 +360,21 @@ export class LogsListPresenter extends BasePresenter { // SPAN, ANCESTOR_OVERRIDE, SPAN_EVENT kinds are already filtered out by the materialized view - // Cursor pagination + // Cursor pagination using explicit lexicographic comparison + // Must mirror the ORDER BY columns: (environment_id DESC, triggered_timestamp DESC, span_id DESC) const decodedCursor = cursor ? 
decodeCursor(cursor) : null; if (decodedCursor) { queryBuilder.where( - "(environment_id, triggered_timestamp, trace_id) < ({cursorEnvId: String}, {cursorTriggeredTimestamp: String}, {cursorTraceId: String})", + `((environment_id = {cursorEnvId: String} AND triggered_timestamp < {cursorTriggeredTimestamp: String}) OR (environment_id = {cursorEnvId: String} AND triggered_timestamp = {cursorTriggeredTimestamp: String} AND span_id < {cursorSpanId: String}) OR (environment_id < {cursorEnvId: String}))`, { cursorEnvId: decodedCursor.environmentId, cursorTriggeredTimestamp: decodedCursor.triggeredTimestamp, - cursorTraceId: decodedCursor.traceId, + cursorSpanId: decodedCursor.spanId, } ); } - queryBuilder.orderBy("environment_id DESC, triggered_timestamp DESC, trace_id DESC"); + queryBuilder.orderBy("environment_id DESC, triggered_timestamp DESC, span_id DESC"); // Limit + 1 to check if there are more results queryBuilder.limit(pageSize + 1); @@ -394,7 +395,7 @@ export class LogsListPresenter extends BasePresenter { nextCursor = encodeCursor({ environmentId, triggeredTimestamp: lastLog.triggered_timestamp, - traceId: lastLog.trace_id, + spanId: lastLog.span_id, }); } From cc6dc027af0fe5c7267e25dd975e602f3c8425bc Mon Sep 17 00:00:00 2001 From: mpcgird Date: Thu, 12 Feb 2026 19:45:45 +0200 Subject: [PATCH 03/10] Changed pagination to use span_id, not trace_id, more granular Improved cursor Removed debug from logs --- .../presenters/v3/LogsListPresenter.server.ts | 24 ++++------- .../route.tsx | 42 +------------------ ...ojects.$projectParam.env.$envParam.logs.ts | 3 -- .../schema/016_add_task_events_search_v1.sql | 3 +- 4 files changed, 10 insertions(+), 62 deletions(-) diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index 42ea27aa2e..04666908fb 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -50,7 +50,6 @@ export type LogsListOptions = { retentionLimitDays?: number; // search search?: string; - includeDebugLogs?: boolean; // pagination direction?: Direction; cursor?: string; @@ -69,7 +68,6 @@ export const LogsListOptionsSchema = z.object({ defaultPeriod: z.string().optional(), retentionLimitDays: z.number().int().positive().optional(), search: z.string().max(1000).optional(), - includeDebugLogs: z.boolean().optional(), direction: z.enum(["forward", "backward"]).optional(), cursor: z.string().optional(), pageSize: z.number().int().positive().max(1000).optional(), @@ -83,12 +81,14 @@ export type LogsListAppliedFilters = LogsList["filters"]; // Cursor is a base64 encoded JSON of the pagination keys type LogCursor = { + organizationId: string; environmentId: string; triggeredTimestamp: string; // DateTime64(9) string spanId: string; }; const LogCursorSchema = z.object({ + organizationId: z.string(), environmentId: z.string(), triggeredTimestamp: z.string(), spanId: z.string(), @@ -166,7 +166,6 @@ export class LogsListPresenter extends BasePresenter { to, cursor, pageSize = DEFAULT_PAGE_SIZE, - includeDebugLogs = true, defaultPeriod, retentionLimitDays, }: LogsListOptions @@ -349,24 +348,14 @@ export class LogsListPresenter extends BasePresenter { } } - // Debug logs are available only to admins - // if (includeDebugLogs === false) { - // queryBuilder.where("kind NOT IN {debugKinds: Array(String)}", { - // debugKinds: ["DEBUG_EVENT"], - // }); - // - // queryBuilder.where("NOT ((kind = 'LOG_INFO') AND (attributes_text = '{}'))"); - 
// } - - // SPAN, ANCESTOR_OVERRIDE, SPAN_EVENT kinds are already filtered out by the materialized view - // Cursor pagination using explicit lexicographic comparison - // Must mirror the ORDER BY columns: (environment_id DESC, triggered_timestamp DESC, span_id DESC) + // Must mirror the ORDER BY columns: (organization_id DESC, environment_id DESC, triggered_timestamp DESC, span_id DESC) const decodedCursor = cursor ? decodeCursor(cursor) : null; if (decodedCursor) { queryBuilder.where( - `((environment_id = {cursorEnvId: String} AND triggered_timestamp < {cursorTriggeredTimestamp: String}) OR (environment_id = {cursorEnvId: String} AND triggered_timestamp = {cursorTriggeredTimestamp: String} AND span_id < {cursorSpanId: String}) OR (environment_id < {cursorEnvId: String}))`, + `((organization_id = {cursorOrgId: String} AND environment_id = {cursorEnvId: String} AND triggered_timestamp = {cursorTriggeredTimestamp: String} AND span_id < {cursorSpanId: String}) OR (organization_id = {cursorOrgId: String} AND environment_id = {cursorEnvId: String} AND triggered_timestamp < {cursorTriggeredTimestamp: String}) OR (organization_id = {cursorOrgId: String} AND environment_id < {cursorEnvId: String}) OR (organization_id < {cursorOrgId: String}))`, { + cursorOrgId: decodedCursor.organizationId, cursorEnvId: decodedCursor.environmentId, cursorTriggeredTimestamp: decodedCursor.triggeredTimestamp, cursorSpanId: decodedCursor.spanId, @@ -374,7 +363,7 @@ export class LogsListPresenter extends BasePresenter { ); } - queryBuilder.orderBy("environment_id DESC, triggered_timestamp DESC, span_id DESC"); + queryBuilder.orderBy("organization_id DESC, environment_id DESC, triggered_timestamp DESC, span_id DESC"); // Limit + 1 to check if there are more results queryBuilder.limit(pageSize + 1); @@ -393,6 +382,7 @@ export class LogsListPresenter extends BasePresenter { if (hasMore && logs.length > 0) { const lastLog = logs[logs.length - 1]; nextCursor = encodeCursor({ + organizationId, environmentId, triggeredTimestamp: lastLog.triggered_timestamp, spanId: lastLog.span_id, diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx index 84dbc2deda..9c2eccc8ea 100644 --- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx +++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs/route.tsx @@ -36,7 +36,6 @@ import { ResizablePanel, ResizablePanelGroup, } from "~/components/primitives/Resizable"; -import { Switch } from "~/components/primitives/Switch"; import { Button } from "~/components/primitives/Buttons"; import { FEATURE_FLAG, validateFeatureFlagValue } from "~/v3/featureFlags.server"; @@ -95,7 +94,6 @@ async function hasLogsPageAccess( export const loader = async ({ request, params }: LoaderFunctionArgs) => { const user = await requireUser(request); const userId = user.id; - const isAdmin = user.admin || user.isImpersonating; const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params); @@ -126,7 +124,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { const runId = url.searchParams.get("runId") ?? undefined; const search = url.searchParams.get("search") ?? 
undefined; const levels = parseLevelsFromUrl(url); - const showDebug = url.searchParams.get("showDebug") === "true"; const period = url.searchParams.get("period") ?? undefined; const fromStr = url.searchParams.get("from"); const toStr = url.searchParams.get("to"); @@ -150,7 +147,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { period, from, to, - includeDebugLogs: isAdmin && showDebug, defaultPeriod: "1h", retentionLimitDays }) @@ -163,15 +159,13 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { return typeddefer({ data: listPromise, - isAdmin, - showDebug, defaultPeriod: "1h", retentionLimitDays, }); }; export default function Page() { - const { data, isAdmin, showDebug, defaultPeriod, retentionLimitDays } = + const { data, defaultPeriod, retentionLimitDays } = useTypedLoaderData(); return ( @@ -199,8 +193,6 @@ export default function Page() { errorElement={
@@ -218,8 +210,6 @@ export default function Page() { return (
@@ -235,15 +225,11 @@ export default function Page() {
@@ -258,14 +244,10 @@ export default function Page() { function FiltersBar({ list, - isAdmin, - showDebug, defaultPeriod, retentionLimitDays, }: { list?: Exclude["data"]>, { error: string }>; - isAdmin: boolean; - showDebug: boolean; defaultPeriod?: string; retentionLimitDays: number; }) { @@ -280,16 +262,6 @@ function FiltersBar({ searchParams.has("from") || searchParams.has("to"); - const handleDebugToggle = useCallback((checked: boolean) => { - const url = new URL(window.location.href); - if (checked) { - url.searchParams.set("showDebug", "true"); - } else { - url.searchParams.delete("showDebug"); - } - window.location.href = url.toString(); - }, []); - return (
@@ -329,16 +301,6 @@ function FiltersBar({ )}
-
- {isAdmin && ( - - )} -
); } @@ -347,8 +309,6 @@ function LogsList({ list, }: { list: Exclude["data"]>, { error: string }>; //exclude error, it is handled - isAdmin: boolean; - showDebug: boolean; defaultPeriod?: string; }) { const navigation = useNavigation(); diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts index 656e20472e..cac4c0f702 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.ts @@ -21,7 +21,6 @@ function parseLevelsFromUrl(url: URL): LogLevel[] | undefined { export const loader = async ({ request, params }: LoaderFunctionArgs) => { const user = await requireUser(request); const userId = user.id; - const isAdmin = user?.admin || user?.isImpersonating; const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params); @@ -46,7 +45,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { const search = url.searchParams.get("search") ?? undefined; const cursor = url.searchParams.get("cursor") ?? undefined; const levels = parseLevelsFromUrl(url); - const showDebug = url.searchParams.get("showDebug") === "true"; const period = url.searchParams.get("period") ?? undefined; const fromStr = url.searchParams.get("from"); const toStr = url.searchParams.get("to"); @@ -67,7 +65,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { from, to, levels, - includeDebugLogs: isAdmin && showDebug, defaultPeriod: "1h", retentionLimitDays, }) as any; // Validated by LogsListOptionsSchema at runtime diff --git a/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql index b44a3555f0..143540bd83 100644 --- a/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql +++ b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql @@ -24,7 +24,7 @@ CREATE TABLE IF NOT EXISTS trigger_dev.task_events_search_v1 ) ENGINE = MergeTree PARTITION BY toDate(triggered_timestamp) -ORDER BY (organization_id, environment_id, triggered_timestamp) +ORDER BY (organization_id, environment_id, triggered_timestamp, span_id) --Right now we have maximum retention of up to 30 days based on plan. 
--We put a logical limit for now, the 90 DAY TTL is just a backup --This might need to be updated for longer retention periods @@ -55,6 +55,7 @@ WHERE kind != 'DEBUG_EVENT' AND status != 'PARTIAL' AND NOT (kind = 'SPAN_EVENT' AND attributes_text = '{}') + AND kind != 'ANCESTOR_OVERRIDE' AND message != 'trigger.dev/start'; -- +goose Down From f3534c453e038bb5c0e466c3a801c172d6549d5f Mon Sep 17 00:00:00 2001 From: mpcgird Date: Thu, 12 Feb 2026 20:20:46 +0200 Subject: [PATCH 04/10] Review fixes --- .../app/presenters/v3/LogsListPresenter.server.ts | 10 +++++----- ...s.$projectParam.env.$envParam.logs.$logId.spans.tsx | 5 ++--- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index 04666908fb..99263020cc 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -116,7 +116,7 @@ function decodeCursor(cursor: string): LogCursor | null { function levelToKindsAndStatuses(level: LogLevel): { kinds?: string[]; statuses?: string[] } { switch (level) { case "DEBUG": - return { kinds: ["DEBUG_EVENT", "LOG_DEBUG"] }; + return { kinds: ["LOG_DEBUG"] }; case "INFO": return { kinds: ["LOG_INFO", "LOG_LOG"] }; case "WARN": @@ -352,18 +352,18 @@ export class LogsListPresenter extends BasePresenter { // Must mirror the ORDER BY columns: (organization_id DESC, environment_id DESC, triggered_timestamp DESC, span_id DESC) const decodedCursor = cursor ? decodeCursor(cursor) : null; if (decodedCursor) { + // organization_id and environment_id are already pinned by equality filters above, + // so cursor only needs to compare triggered_timestamp and span_id queryBuilder.where( - `((organization_id = {cursorOrgId: String} AND environment_id = {cursorEnvId: String} AND triggered_timestamp = {cursorTriggeredTimestamp: String} AND span_id < {cursorSpanId: String}) OR (organization_id = {cursorOrgId: String} AND environment_id = {cursorEnvId: String} AND triggered_timestamp < {cursorTriggeredTimestamp: String}) OR (organization_id = {cursorOrgId: String} AND environment_id < {cursorEnvId: String}) OR (organization_id < {cursorOrgId: String}))`, + `(triggered_timestamp < {cursorTriggeredTimestamp: String} OR (triggered_timestamp = {cursorTriggeredTimestamp: String} AND span_id < {cursorSpanId: String}))`, { - cursorOrgId: decodedCursor.organizationId, - cursorEnvId: decodedCursor.environmentId, cursorTriggeredTimestamp: decodedCursor.triggeredTimestamp, cursorSpanId: decodedCursor.spanId, } ); } - queryBuilder.orderBy("organization_id DESC, environment_id DESC, triggered_timestamp DESC, span_id DESC"); + queryBuilder.orderBy("triggered_timestamp DESC, span_id DESC"); // Limit + 1 to check if there are more results queryBuilder.limit(pageSize + 1); diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx index 2eab7492fc..17ac4598a1 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx @@ -5,13 +5,13 @@ import { EnvironmentParamSchema } from "~/utils/pathBuilder"; import { findProjectBySlug } from "~/models/project.server"; import { 
findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; import { clickhouseClient } from "~/services/clickhouseInstance.server"; +import { convertClickhouseDateTime64ToJsDate } from "~/v3/eventRepository/clickhouseEventRepository.server"; // Convert ClickHouse kind to display level function kindToLevel( kind: string ): "TRACE" | "DEBUG" | "INFO" | "WARN" | "ERROR" | "LOG" { switch (kind) { - case "DEBUG_EVENT": case "LOG_DEBUG": return "DEBUG"; case "LOG_INFO": @@ -23,7 +23,6 @@ function kindToLevel( case "LOG_LOG": return "LOG"; case "SPAN": - case "ANCESTOR_OVERRIDE": case "SPAN_EVENT": default: return "TRACE"; @@ -91,7 +90,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { kind: row.kind, level: kindToLevel(row.kind), status: row.status, - startTime: new Date(Number(row.start_time) / 1_000_000).toISOString(), + startTime: convertClickhouseDateTime64ToJsDate(row.start_time).toISOString(), duration: Number(row.duration), isCurrent: row.span_id === currentSpanId, })); From 51ceac9bd76e6206267aa6804f30389624df7b97 Mon Sep 17 00:00:00 2001 From: mpcgird Date: Thu, 12 Feb 2026 20:33:28 +0200 Subject: [PATCH 05/10] added org id to logs search --- ....projects.$projectParam.env.$envParam.logs.$logId.spans.tsx | 3 +++ 1 file changed, 3 insertions(+) diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx index 17ac4598a1..9f5c73b15a 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx @@ -64,6 +64,9 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { // Query ClickHouse for related spans in the same trace const queryBuilder = clickhouseClient.taskEventsSearch.logsListQueryBuilder(); + queryBuilder.where("organization_id = {organizationId: String}", { + organizationId: project.organizationId, + }); queryBuilder.where("environment_id = {environmentId: String}", { environmentId: environment.id, }); From b9324cd9b61211c1d9c4c37a288ab7f50475188c Mon Sep 17 00:00:00 2001 From: mpcgird Date: Thu, 12 Feb 2026 20:43:36 +0200 Subject: [PATCH 06/10] Removed start_time filter --- apps/webapp/app/presenters/v3/LogsListPresenter.server.ts | 8 -------- 1 file changed, 8 deletions(-) diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index 99263020cc..7614f2f3b4 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -269,10 +269,6 @@ export class LogsListPresenter extends BasePresenter { queryBuilder.where("inserted_at >= {insertedAtStart: DateTime64(3)}", { insertedAtStart: convertDateToClickhouseDateTime(effectiveFrom), }); - - queryBuilder.where("start_time >= {fromTime: String}", { - fromTime: formatNanosecondsForClickhouse(fromNs), - }); } if (effectiveTo) { @@ -282,10 +278,6 @@ export class LogsListPresenter extends BasePresenter { queryBuilder.where("inserted_at <= {insertedAtEnd: DateTime64(3)}", { insertedAtEnd: convertDateToClickhouseDateTime(clampedTo), }); - - queryBuilder.where("start_time <= {toTime: String}", { - toTime: formatNanosecondsForClickhouse(toNs), - }); } // Task filter (applies 
directly to ClickHouse) From 8dd9258335eb96a00dce391ad29aa64099a5e277 Mon Sep 17 00:00:00 2001 From: mpcgird Date: Thu, 12 Feb 2026 20:54:44 +0200 Subject: [PATCH 07/10] Updated logs with triggeredTimestamp instead of starTime --- apps/webapp/app/components/logs/LogDetailView.tsx | 2 +- apps/webapp/app/components/logs/LogsTable.tsx | 2 +- .../app/presenters/v3/LogDetailPresenter.server.ts | 5 +++++ .../app/presenters/v3/LogsListPresenter.server.ts | 14 +++++++------- 4 files changed, 14 insertions(+), 9 deletions(-) diff --git a/apps/webapp/app/components/logs/LogDetailView.tsx b/apps/webapp/app/components/logs/LogDetailView.tsx index 6b3a76b8a8..a5cbb15aeb 100644 --- a/apps/webapp/app/components/logs/LogDetailView.tsx +++ b/apps/webapp/app/components/logs/LogDetailView.tsx @@ -234,7 +234,7 @@ function DetailsTab({ log, runPath, searchTerm }: { log: LogEntry; runPath: stri
Timestamp
- +
diff --git a/apps/webapp/app/components/logs/LogsTable.tsx b/apps/webapp/app/components/logs/LogsTable.tsx index a361d95c5e..2f6894a03e 100644 --- a/apps/webapp/app/components/logs/LogsTable.tsx +++ b/apps/webapp/app/components/logs/LogsTable.tsx @@ -162,7 +162,7 @@ export function LogsTable({ boxShadow: getLevelBoxShadow(log.level), }} > - + diff --git a/apps/webapp/app/presenters/v3/LogDetailPresenter.server.ts b/apps/webapp/app/presenters/v3/LogDetailPresenter.server.ts index e23e2f552c..d354018b82 100644 --- a/apps/webapp/app/presenters/v3/LogDetailPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogDetailPresenter.server.ts @@ -81,12 +81,17 @@ export class LogDetailPresenter { // Ignore parse errors } + const durationMs = (typeof log.duration === "number" ? log.duration : Number(log.duration)) / 1_000_000; + return { // Use :: separator to match LogsListPresenter format id: `${log.trace_id}::${log.span_id}::${log.run_id}::${log.start_time}`, runId: log.run_id, taskIdentifier: log.task_identifier, startTime: convertClickhouseDateTime64ToJsDate(log.start_time).toISOString(), + triggeredTimestamp: new Date( + convertClickhouseDateTime64ToJsDate(log.start_time).getTime() + durationMs + ).toISOString(), traceId: log.trace_id, spanId: log.span_id, parentSpanId: log.parent_span_id || null, diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index 7614f2f3b4..889419180f 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -264,19 +264,16 @@ export class LogsListPresenter extends BasePresenter { if (effectiveFrom) { - const fromNs = convertDateToNanoseconds(effectiveFrom); - - queryBuilder.where("inserted_at >= {insertedAtStart: DateTime64(3)}", { - insertedAtStart: convertDateToClickhouseDateTime(effectiveFrom), + queryBuilder.where("triggered_timestamp >= {triggeredAtStart: DateTime64(3)}", { + triggeredAtStart: convertDateToClickhouseDateTime(effectiveFrom), }); } if (effectiveTo) { const clampedTo = effectiveTo > new Date() ? 
new Date() : effectiveTo; - const toNs = convertDateToNanoseconds(clampedTo); - queryBuilder.where("inserted_at <= {insertedAtEnd: DateTime64(3)}", { - insertedAtEnd: convertDateToClickhouseDateTime(clampedTo), + queryBuilder.where("triggered_timestamp <= {triggeredAtEnd: DateTime64(3)}", { + triggeredAtEnd: convertDateToClickhouseDateTime(clampedTo), }); } @@ -404,6 +401,9 @@ export class LogsListPresenter extends BasePresenter { runId: log.run_id, taskIdentifier: log.task_identifier, startTime: convertClickhouseDateTime64ToJsDate(log.start_time).toISOString(), + triggeredTimestamp: convertClickhouseDateTime64ToJsDate( + log.triggered_timestamp + ).toISOString(), traceId: log.trace_id, spanId: log.span_id, parentSpanId: log.parent_span_id || null, From 58022ccd24173fd86cb135e3b170430ad2ca7650 Mon Sep 17 00:00:00 2001 From: mpcgird Date: Thu, 12 Feb 2026 21:21:13 +0200 Subject: [PATCH 08/10] small filtering fixes removed unused code --- .../presenters/v3/LogsListPresenter.server.ts | 23 ++----------------- ...tParam.env.$envParam.logs.$logId.spans.tsx | 3 +++ .../clickhouse/src/taskEvents.ts | 2 +- 3 files changed, 6 insertions(+), 22 deletions(-) diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index 889419180f..bd1fc50455 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -118,31 +118,14 @@ function levelToKindsAndStatuses(level: LogLevel): { kinds?: string[]; statuses? case "DEBUG": return { kinds: ["LOG_DEBUG"] }; case "INFO": - return { kinds: ["LOG_INFO", "LOG_LOG"] }; + return { kinds: ["LOG_INFO", "LOG_LOG", "SPAN"] }; case "WARN": return { kinds: ["LOG_WARN"] }; case "ERROR": - return { kinds: ["LOG_ERROR"], statuses: ["ERROR"] }; + return { kinds: ["LOG_ERROR", "SPAN_EVENT"], statuses: ["ERROR"] }; } } -function convertDateToNanoseconds(date: Date): bigint { - return BigInt(date.getTime()) * 1_000_000n; -} - -function formatNanosecondsForClickhouse(ns: bigint): string { - const nsString = ns.toString(); - // Handle negative numbers (dates before 1970-01-01) - if (nsString.startsWith("-")) { - const absString = nsString.slice(1); - const padded = absString.padStart(19, "0"); - return "-" + padded.slice(0, 10) + "." + padded.slice(10); - } - // Pad positive numbers to 19 digits to ensure correct slicing - const padded = nsString.padStart(19, "0"); - return padded.slice(0, 10) + "." + padded.slice(10); -} - export class LogsListPresenter extends BasePresenter { constructor( private readonly replica: PrismaClientOrTransaction, @@ -341,8 +324,6 @@ export class LogsListPresenter extends BasePresenter { // Must mirror the ORDER BY columns: (organization_id DESC, environment_id DESC, triggered_timestamp DESC, span_id DESC) const decodedCursor = cursor ? 
decodeCursor(cursor) : null; if (decodedCursor) { - // organization_id and environment_id are already pinned by equality filters above, - // so cursor only needs to compare triggered_timestamp and span_id queryBuilder.where( `(triggered_timestamp < {cursorTriggeredTimestamp: String} OR (triggered_timestamp = {cursorTriggeredTimestamp: String} AND span_id < {cursorSpanId: String}))`, { diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx index 9f5c73b15a..d5f7f9734f 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx @@ -70,6 +70,9 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { queryBuilder.where("environment_id = {environmentId: String}", { environmentId: environment.id, }); + queryBuilder.where("project_id = {projectId: String}", { + projectId: project.id, + }); queryBuilder.where("trace_id = {traceId: String}", { traceId }); queryBuilder.where("run_id = {runId: String}", { runId }); diff --git a/internal-packages/clickhouse/src/taskEvents.ts b/internal-packages/clickhouse/src/taskEvents.ts index 561de7e879..73e2d8344e 100644 --- a/internal-packages/clickhouse/src/taskEvents.ts +++ b/internal-packages/clickhouse/src/taskEvents.ts @@ -273,7 +273,7 @@ export function getLogsSearchListQueryBuilder( "trace_id", "span_id", "parent_span_id", - { name: "message", expression: "LEFT(message, 256)" }, + { name: "message", expression: "LEFT(message, 512)" }, "kind", "status", "duration", From 96163c6497ef7fa660cd6f6540eca76834a4dbf9 Mon Sep 17 00:00:00 2001 From: mpcgird Date: Fri, 13 Feb 2026 10:16:44 +0200 Subject: [PATCH 09/10] rolled back to trace_id ordering --- .../app/presenters/v3/LogsListPresenter.server.ts | 14 +++++++------- .../schema/016_add_task_events_search_v1.sql | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts index bd1fc50455..545bca5cce 100644 --- a/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/LogsListPresenter.server.ts @@ -84,14 +84,14 @@ type LogCursor = { organizationId: string; environmentId: string; triggeredTimestamp: string; // DateTime64(9) string - spanId: string; + traceId: string; }; const LogCursorSchema = z.object({ organizationId: z.string(), environmentId: z.string(), triggeredTimestamp: z.string(), - spanId: z.string(), + traceId: z.string(), }); function encodeCursor(cursor: LogCursor): string { @@ -321,19 +321,19 @@ export class LogsListPresenter extends BasePresenter { } // Cursor pagination using explicit lexicographic comparison - // Must mirror the ORDER BY columns: (organization_id DESC, environment_id DESC, triggered_timestamp DESC, span_id DESC) + // Must mirror the ORDER BY columns: (organization_id, environment_id, triggered_timestamp, trace_id) const decodedCursor = cursor ? 
decodeCursor(cursor) : null; if (decodedCursor) { queryBuilder.where( - `(triggered_timestamp < {cursorTriggeredTimestamp: String} OR (triggered_timestamp = {cursorTriggeredTimestamp: String} AND span_id < {cursorSpanId: String}))`, + `(triggered_timestamp < {cursorTriggeredTimestamp: String} OR (triggered_timestamp = {cursorTriggeredTimestamp: String} AND trace_id < {cursorTraceId: String}))`, { cursorTriggeredTimestamp: decodedCursor.triggeredTimestamp, - cursorSpanId: decodedCursor.spanId, + cursorTraceId: decodedCursor.traceId, } ); } - queryBuilder.orderBy("triggered_timestamp DESC, span_id DESC"); + queryBuilder.orderBy("triggered_timestamp DESC, trace_id DESC"); // Limit + 1 to check if there are more results queryBuilder.limit(pageSize + 1); @@ -355,7 +355,7 @@ export class LogsListPresenter extends BasePresenter { organizationId, environmentId, triggeredTimestamp: lastLog.triggered_timestamp, - spanId: lastLog.span_id, + traceId: lastLog.trace_id, }); } diff --git a/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql index 143540bd83..60ed1eb1fc 100644 --- a/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql +++ b/internal-packages/clickhouse/schema/016_add_task_events_search_v1.sql @@ -24,7 +24,7 @@ CREATE TABLE IF NOT EXISTS trigger_dev.task_events_search_v1 ) ENGINE = MergeTree PARTITION BY toDate(triggered_timestamp) -ORDER BY (organization_id, environment_id, triggered_timestamp, span_id) +ORDER BY (organization_id, environment_id, triggered_timestamp, trace_id) --Right now we have maximum retention of up to 30 days based on plan. --We put a logical limit for now, the 90 DAY TTL is just a backup --This might need to be updated for longer retention periods @@ -48,7 +48,7 @@ SELECT status, duration, parent_span_id, - toJSONString(attributes) AS attributes_text, + attributes_text, fromUnixTimestamp64Nano(toUnixTimestamp64Nano(start_time) + toInt64(duration)) AS triggered_timestamp FROM trigger_dev.task_events_v2 WHERE From 9a83b02b06c41ef6eccb1ec9162ba1fc5fda2c73 Mon Sep 17 00:00:00 2001 From: mpcgird Date: Fri, 13 Feb 2026 10:52:52 +0200 Subject: [PATCH 10/10] removed unused route --- ...tParam.env.$envParam.logs.$logId.spans.tsx | 105 ------------------ 1 file changed, 105 deletions(-) delete mode 100644 apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx deleted file mode 100644 index d5f7f9734f..0000000000 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.logs.$logId.spans.tsx +++ /dev/null @@ -1,105 +0,0 @@ -import { type LoaderFunctionArgs } from "@remix-run/server-runtime"; -import { json } from "@remix-run/node"; -import { requireUserId } from "~/services/session.server"; -import { EnvironmentParamSchema } from "~/utils/pathBuilder"; -import { findProjectBySlug } from "~/models/project.server"; -import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server"; -import { clickhouseClient } from "~/services/clickhouseInstance.server"; -import { convertClickhouseDateTime64ToJsDate } from "~/v3/eventRepository/clickhouseEventRepository.server"; - -// Convert ClickHouse kind to display level -function 
kindToLevel( - kind: string -): "TRACE" | "DEBUG" | "INFO" | "WARN" | "ERROR" | "LOG" { - switch (kind) { - case "LOG_DEBUG": - return "DEBUG"; - case "LOG_INFO": - return "INFO"; - case "LOG_WARN": - return "WARN"; - case "LOG_ERROR": - return "ERROR"; - case "LOG_LOG": - return "LOG"; - case "SPAN": - case "SPAN_EVENT": - default: - return "TRACE"; - } -} - -// Fetch related spans for a log entry from the same trace -export const loader = async ({ request, params }: LoaderFunctionArgs) => { - const userId = await requireUserId(request); - const { projectParam, organizationSlug, envParam, logId } = { - ...EnvironmentParamSchema.parse(params), - logId: params.logId, - }; - - if (!logId) { - throw new Response("Log ID is required", { status: 400 }); - } - - const project = await findProjectBySlug(organizationSlug, projectParam, userId); - if (!project) { - throw new Response("Project not found", { status: 404 }); - } - - const environment = await findEnvironmentBySlug(project.id, envParam, userId); - if (!environment) { - throw new Response("Environment not found", { status: 404 }); - } - - // Get trace ID and run ID from query params - const url = new URL(request.url); - const traceId = url.searchParams.get("traceId"); - const runId = url.searchParams.get("runId"); - const currentSpanId = url.searchParams.get("spanId"); - - if (!traceId || !runId) { - throw new Response("Trace ID and Run ID are required", { status: 400 }); - } - - // Query ClickHouse for related spans in the same trace - const queryBuilder = clickhouseClient.taskEventsSearch.logsListQueryBuilder(); - - queryBuilder.where("organization_id = {organizationId: String}", { - organizationId: project.organizationId, - }); - queryBuilder.where("environment_id = {environmentId: String}", { - environmentId: environment.id, - }); - queryBuilder.where("project_id = {projectId: String}", { - projectId: project.id, - }); - queryBuilder.where("trace_id = {traceId: String}", { traceId }); - queryBuilder.where("run_id = {runId: String}", { runId }); - - // Order by start time to show spans in chronological order - queryBuilder.orderBy("start_time ASC"); - queryBuilder.limit(50); - - const [queryError, records] = await queryBuilder.execute(); - - if (queryError) { - throw queryError; - } - - const results = records || []; - - const spans = results.map((row) => ({ - id: `${row.trace_id}::${row.span_id}::${row.run_id}::${row.start_time}`, - spanId: row.span_id, - parentSpanId: row.parent_span_id || null, - message: row.message.substring(0, 200), // Truncate for list view - kind: row.kind, - level: kindToLevel(row.kind), - status: row.status, - startTime: convertClickhouseDateTime64ToJsDate(row.start_time).toISOString(), - duration: Number(row.duration), - isCurrent: row.span_id === currentSpanId, - })); - - return json({ spans }); -};
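
For reference, below is a minimal, self-contained sketch of the keyset pagination scheme the later commits converge on: the cursor carries the triggered_timestamp and trace_id of the last returned row, mirroring the search table's ORDER BY, and the next page is fetched with an explicit lexicographic comparison. The encodeCursor/decodeCursor shapes follow the presenter; the cursorCondition and nextCursorFrom helpers and their parameter names are illustrative stand-ins, not part of the actual ClickHouse query-builder API.

import { z } from "zod";

// Cursor mirrors the search table's ORDER BY tail: (triggered_timestamp, trace_id).
const LogCursorSchema = z.object({
  triggeredTimestamp: z.string(), // DateTime64(9) string as returned by ClickHouse
  traceId: z.string(),
});
type LogCursor = z.infer<typeof LogCursorSchema>;

function encodeCursor(cursor: LogCursor): string {
  return Buffer.from(JSON.stringify(cursor)).toString("base64");
}

function decodeCursor(raw: string): LogCursor | null {
  try {
    return LogCursorSchema.parse(JSON.parse(Buffer.from(raw, "base64").toString("utf-8")));
  } catch {
    return null; // malformed cursors fall back to the first page
  }
}

// Illustrative stand-in for a query-builder WHERE fragment plus bound parameters.
type QueryParams = Record<string, string | number>;

function cursorCondition(cursor: LogCursor): { sql: string; params: QueryParams } {
  // Explicit lexicographic comparison on (triggered_timestamp, trace_id),
  // matching ORDER BY triggered_timestamp DESC, trace_id DESC.
  return {
    sql:
      "(triggered_timestamp < {cursorTriggeredTimestamp: String} " +
      "OR (triggered_timestamp = {cursorTriggeredTimestamp: String} " +
      "AND trace_id < {cursorTraceId: String}))",
    params: {
      cursorTriggeredTimestamp: cursor.triggeredTimestamp,
      cursorTraceId: cursor.traceId,
    },
  };
}

// Fetch pageSize + 1 rows: the extra row only signals that more results exist;
// the next cursor is taken from the last row actually returned to the client.
function nextCursorFrom(
  rows: Array<{ triggered_timestamp: string; trace_id: string }>,
  pageSize: number
): string | undefined {
  const hasMore = rows.length > pageSize;
  const page = rows.slice(0, pageSize);
  const last = page[page.length - 1];
  return hasMore && last
    ? encodeCursor({ triggeredTimestamp: last.triggered_timestamp, traceId: last.trace_id })
    : undefined;
}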