diff --git a/ast/ast.go b/ast/ast.go index 08dd138113..4a6ae9ff1f 100644 --- a/ast/ast.go +++ b/ast/ast.go @@ -81,6 +81,7 @@ type SelectQuery struct { Limit Expression `json:"limit,omitempty"` LimitBy []Expression `json:"limit_by,omitempty"` LimitByLimit Expression `json:"limit_by_limit,omitempty"` // LIMIT value before BY (e.g., LIMIT 1 BY x LIMIT 3) + LimitByOffset Expression `json:"limit_by_offset,omitempty"` // Offset for LIMIT BY (e.g., LIMIT 2, 3 BY x -> offset=2) LimitByHasLimit bool `json:"limit_by_has_limit,omitempty"` // true if LIMIT BY was followed by another LIMIT Offset Expression `json:"offset,omitempty"` Settings []*SettingExpr `json:"settings,omitempty"` @@ -242,20 +243,22 @@ func (s *SettingExpr) End() token.Position { return s.Position } // InsertQuery represents an INSERT statement. type InsertQuery struct { - Position token.Position `json:"-"` - Database string `json:"database,omitempty"` - Table string `json:"table,omitempty"` - Function *FunctionCall `json:"function,omitempty"` // For INSERT INTO FUNCTION syntax - Columns []*Identifier `json:"columns,omitempty"` - AllColumns bool `json:"all_columns,omitempty"` // For (*) syntax meaning all columns - PartitionBy Expression `json:"partition_by,omitempty"` // For PARTITION BY clause - Infile string `json:"infile,omitempty"` // For FROM INFILE clause - Compression string `json:"compression,omitempty"` // For COMPRESSION clause - Values [][]Expression `json:"-"` // For VALUES clause (format only, not in AST JSON) - Select Statement `json:"select,omitempty"` - Format *Identifier `json:"format,omitempty"` - HasSettings bool `json:"has_settings,omitempty"` // For SETTINGS clause - Settings []*SettingExpr `json:"settings,omitempty"` // For SETTINGS clause in INSERT + Position token.Position `json:"-"` + Database string `json:"database,omitempty"` + Table string `json:"table,omitempty"` + Function *FunctionCall `json:"function,omitempty"` // For INSERT INTO FUNCTION syntax + Columns []*Identifier `json:"columns,omitempty"` + ColumnExpressions []Expression `json:"column_expressions,omitempty"` // For asterisk/COLUMNS expressions with transformers + AllColumns bool `json:"all_columns,omitempty"` // For (*) syntax meaning all columns + PartitionBy Expression `json:"partition_by,omitempty"` // For PARTITION BY clause + Infile string `json:"infile,omitempty"` // For FROM INFILE clause + Compression string `json:"compression,omitempty"` // For COMPRESSION clause + Values [][]Expression `json:"-"` // For VALUES clause (format only, not in AST JSON) + Select Statement `json:"select,omitempty"` + With []Expression `json:"with,omitempty"` // For WITH ... INSERT ... 
SELECT syntax + Format *Identifier `json:"format,omitempty"` + HasSettings bool `json:"has_settings,omitempty"` // For SETTINGS clause + Settings []*SettingExpr `json:"settings,omitempty"` // For SETTINGS clause in INSERT } func (i *InsertQuery) Pos() token.Position { return i.Position } @@ -280,9 +283,10 @@ type CreateQuery struct { Projections []*Projection `json:"projections,omitempty"` Constraints []*Constraint `json:"constraints,omitempty"` ColumnsPrimaryKey []Expression `json:"columns_primary_key,omitempty"` // PRIMARY KEY in column list - Engine *EngineClause `json:"engine,omitempty"` - OrderBy []Expression `json:"order_by,omitempty"` - PartitionBy Expression `json:"partition_by,omitempty"` + Engine *EngineClause `json:"engine,omitempty"` + OrderBy []Expression `json:"order_by,omitempty"` + OrderByHasModifiers bool `json:"order_by_has_modifiers,omitempty"` // True if ORDER BY has ASC/DESC modifiers + PartitionBy Expression `json:"partition_by,omitempty"` PrimaryKey []Expression `json:"primary_key,omitempty"` SampleBy Expression `json:"sample_by,omitempty"` TTL *TTLClause `json:"ttl,omitempty"` @@ -298,6 +302,7 @@ type CreateQuery struct { AlterUser bool `json:"alter_user,omitempty"` HasAuthenticationData bool `json:"has_authentication_data,omitempty"` AuthenticationValues []string `json:"authentication_values,omitempty"` // Password/hash values from IDENTIFIED BY + SSHKeyCount int `json:"ssh_key_count,omitempty"` // Number of SSH keys for ssh_key auth CreateDictionary bool `json:"create_dictionary,omitempty"` DictionaryAttrs []*DictionaryAttributeDeclaration `json:"dictionary_attrs,omitempty"` DictionaryDef *DictionaryDefinition `json:"dictionary_def,omitempty"` @@ -488,8 +493,9 @@ func (e *EngineClause) End() token.Position { return e.Position } // TTLClause represents a TTL clause. type TTLClause struct { - Position token.Position `json:"-"` - Expression Expression `json:"expression"` + Position token.Position `json:"-"` + Expression Expression `json:"expression"` + Expressions []Expression `json:"expressions,omitempty"` // Additional TTL expressions (for multiple TTL elements) } func (t *TTLClause) Pos() token.Position { return t.Position } @@ -516,7 +522,8 @@ type DropQuery struct { OnCluster string `json:"on_cluster,omitempty"` DropDatabase bool `json:"drop_database,omitempty"` Sync bool `json:"sync,omitempty"` - Format string `json:"format,omitempty"` // For FORMAT clause + Format string `json:"format,omitempty"` // For FORMAT clause + Settings []*SettingExpr `json:"settings,omitempty"` // For SETTINGS clause } func (d *DropQuery) Pos() token.Position { return d.Position } @@ -530,6 +537,7 @@ type UndropQuery struct { Table string `json:"table"` OnCluster string `json:"on_cluster,omitempty"` UUID string `json:"uuid,omitempty"` + Format string `json:"format,omitempty"` } func (u *UndropQuery) Pos() token.Position { return u.Position } @@ -580,6 +588,7 @@ type AlterCommand struct { IndexType string `json:"index_type,omitempty"` IndexDef *IndexDefinition `json:"index_def,omitempty"` // For ADD INDEX with full definition Granularity int `json:"granularity,omitempty"` + AfterIndex string `json:"after_index,omitempty"` // For ADD INDEX ... 
AFTER name Constraint *Constraint `json:"constraint,omitempty"` ConstraintName string `json:"constraint_name,omitempty"` Partition Expression `json:"partition,omitempty"` @@ -654,6 +663,7 @@ const ( AlterModifyTTL AlterCommandType = "MODIFY_TTL" AlterMaterializeTTL AlterCommandType = "MATERIALIZE_TTL" AlterModifySetting AlterCommandType = "MODIFY_SETTING" + AlterResetSetting AlterCommandType = "RESET_SETTING" AlterDropPartition AlterCommandType = "DROP_PARTITION" AlterDetachPartition AlterCommandType = "DETACH_PARTITION" AlterAttachPartition AlterCommandType = "ATTACH_PARTITION" @@ -683,6 +693,7 @@ const ( // TruncateQuery represents a TRUNCATE statement. type TruncateQuery struct { Position token.Position `json:"-"` + Temporary bool `json:"temporary,omitempty"` IfExists bool `json:"if_exists,omitempty"` Database string `json:"database,omitempty"` Table string `json:"table"` @@ -700,6 +711,7 @@ type DeleteQuery struct { Database string `json:"database,omitempty"` Table string `json:"table"` Where Expression `json:"where,omitempty"` + Settings []*SettingExpr `json:"settings,omitempty"` } func (d *DeleteQuery) Pos() token.Position { return d.Position } @@ -730,15 +742,20 @@ func (d *DetachQuery) statementNode() {} // AttachQuery represents an ATTACH statement. type AttachQuery struct { - Position token.Position `json:"-"` - Database string `json:"database,omitempty"` - Table string `json:"table,omitempty"` - Dictionary string `json:"dictionary,omitempty"` - Columns []*ColumnDeclaration `json:"columns,omitempty"` - ColumnsPrimaryKey []Expression `json:"columns_primary_key,omitempty"` // PRIMARY KEY in column list - Engine *EngineClause `json:"engine,omitempty"` - OrderBy []Expression `json:"order_by,omitempty"` - PrimaryKey []Expression `json:"primary_key,omitempty"` + Position token.Position `json:"-"` + Database string `json:"database,omitempty"` + Table string `json:"table,omitempty"` + Dictionary string `json:"dictionary,omitempty"` + Columns []*ColumnDeclaration `json:"columns,omitempty"` + ColumnsPrimaryKey []Expression `json:"columns_primary_key,omitempty"` // PRIMARY KEY in column list + Engine *EngineClause `json:"engine,omitempty"` + OrderBy []Expression `json:"order_by,omitempty"` + PrimaryKey []Expression `json:"primary_key,omitempty"` + IsMaterializedView bool `json:"is_materialized_view,omitempty"` + UUID string `json:"uuid,omitempty"` // UUID clause + InnerUUID string `json:"inner_uuid,omitempty"` // TO INNER UUID clause + PartitionBy Expression `json:"partition_by,omitempty"` + SelectQuery Statement `json:"select_query,omitempty"` // AS SELECT clause } func (a *AttachQuery) Pos() token.Position { return a.Position } @@ -762,15 +779,17 @@ func (d *DescribeQuery) statementNode() {} // ShowQuery represents a SHOW statement. 
type ShowQuery struct { - Position token.Position `json:"-"` - ShowType ShowType `json:"show_type"` - Database string `json:"database,omitempty"` - From string `json:"from,omitempty"` - Like string `json:"like,omitempty"` - Where Expression `json:"where,omitempty"` - Limit Expression `json:"limit,omitempty"` - Format string `json:"format,omitempty"` - HasSettings bool `json:"has_settings,omitempty"` // Whether SETTINGS clause was specified + Position token.Position `json:"-"` + ShowType ShowType `json:"show_type"` + Temporary bool `json:"temporary,omitempty"` + Database string `json:"database,omitempty"` + From string `json:"from,omitempty"` + Like string `json:"like,omitempty"` + Where Expression `json:"where,omitempty"` + Limit Expression `json:"limit,omitempty"` + Format string `json:"format,omitempty"` + HasSettings bool `json:"has_settings,omitempty"` // Whether SETTINGS clause was specified + MultipleUsers bool `json:"multiple_users,omitempty"` // True when SHOW CREATE USER has multiple users } func (s *ShowQuery) Pos() token.Position { return s.Position } @@ -798,6 +817,7 @@ const ( ShowDictionaries ShowType = "DICTIONARIES" ShowFunctions ShowType = "FUNCTIONS" ShowSettings ShowType = "SETTINGS" + ShowSetting ShowType = "SETTING" ShowGrants ShowType = "GRANTS" ) @@ -860,6 +880,8 @@ type CheckQuery struct { Position token.Position `json:"-"` Database string `json:"database,omitempty"` Table string `json:"table"` + Partition Expression `json:"partition,omitempty"` + Part Expression `json:"part,omitempty"` Format string `json:"format,omitempty"` Settings []*SettingExpr `json:"settings,omitempty"` } @@ -943,6 +965,7 @@ const ( type ExistsQuery struct { Position token.Position `json:"-"` ExistsType ExistsType `json:"exists_type,omitempty"` + Temporary bool `json:"temporary,omitempty"` Database string `json:"database,omitempty"` Table string `json:"table"` Settings []*SettingExpr `json:"settings,omitempty"` @@ -1033,6 +1056,37 @@ func (d *DropSettingsProfileQuery) Pos() token.Position { return d.Position } func (d *DropSettingsProfileQuery) End() token.Position { return d.Position } func (d *DropSettingsProfileQuery) statementNode() {} +// CreateNamedCollectionQuery represents a CREATE NAMED COLLECTION statement. +type CreateNamedCollectionQuery struct { + Position token.Position `json:"-"` + Name string `json:"name,omitempty"` +} + +func (c *CreateNamedCollectionQuery) Pos() token.Position { return c.Position } +func (c *CreateNamedCollectionQuery) End() token.Position { return c.Position } +func (c *CreateNamedCollectionQuery) statementNode() {} + +// AlterNamedCollectionQuery represents an ALTER NAMED COLLECTION statement. +type AlterNamedCollectionQuery struct { + Position token.Position `json:"-"` + Name string `json:"name,omitempty"` +} + +func (a *AlterNamedCollectionQuery) Pos() token.Position { return a.Position } +func (a *AlterNamedCollectionQuery) End() token.Position { return a.Position } +func (a *AlterNamedCollectionQuery) statementNode() {} + +// DropNamedCollectionQuery represents a DROP NAMED COLLECTION statement. 
+type DropNamedCollectionQuery struct { + Position token.Position `json:"-"` + Name string `json:"name,omitempty"` + IfExists bool `json:"if_exists,omitempty"` +} + +func (d *DropNamedCollectionQuery) Pos() token.Position { return d.Position } +func (d *DropNamedCollectionQuery) End() token.Position { return d.Position } +func (d *DropNamedCollectionQuery) statementNode() {} + // ShowCreateSettingsProfileQuery represents a SHOW CREATE SETTINGS PROFILE statement. type ShowCreateSettingsProfileQuery struct { Position token.Position `json:"-"` @@ -1211,11 +1265,15 @@ func (t *TableIdentifier) expressionNode() {} // Literal represents a literal value. type Literal struct { - Position token.Position `json:"-"` - Type LiteralType `json:"type"` - Value interface{} `json:"value"` - Source string `json:"source,omitempty"` // Original source text (for preserving 0.0 vs 0) - Negative bool `json:"negative,omitempty"` // True if literal was explicitly negative (for -0) + Position token.Position `json:"-"` + Type LiteralType `json:"type"` + Value interface{} `json:"value"` + Source string `json:"source,omitempty"` // Original source text (for preserving 0.0 vs 0) + Negative bool `json:"negative,omitempty"` // True if literal was explicitly negative (for -0) + Parenthesized bool `json:"parenthesized,omitempty"` // True if wrapped in explicit parentheses + SpacedCommas bool `json:"spaced_commas,omitempty"` // True if array/tuple had spaces after commas + SpacedBrackets bool `json:"spaced_brackets,omitempty"` // True if array had whitespace after [ and before ] + IsBigInt bool `json:"is_big_int,omitempty"` // True if this is a large integer stored as string } func (l *Literal) Pos() token.Position { return l.Position } @@ -1331,6 +1389,7 @@ type FunctionCall struct { Arguments []Expression `json:"arguments,omitempty"` Settings []*SettingExpr `json:"settings,omitempty"` // For table functions with SETTINGS Distinct bool `json:"distinct,omitempty"` + Filter Expression `json:"filter,omitempty"` // FILTER(WHERE condition) clause Over *WindowSpec `json:"over,omitempty"` Alias string `json:"alias,omitempty"` SQLStandard bool `json:"sql_standard,omitempty"` // True for SQL standard syntax like TRIM(... FROM ...) 
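
The new SelectQuery.LimitByOffset and FunctionCall.Filter fields introduced above are what the explain printer later in this diff keys off. A minimal sketch of how such nodes look once populated — illustrative only: it builds the AST by hand rather than going through the parser, and it assumes the exported Identifier.Parts and Literal fields referenced elsewhere in this diff.

package main

import (
	"fmt"

	"github.com/sqlc-dev/doubleclick/ast"
)

func main() {
	// count(x) FILTER (WHERE is_valid): the FILTER condition now lives on the
	// call itself; the explain printer renders the call with an "If" suffix
	// and appends the condition as the last argument.
	call := &ast.FunctionCall{
		Name:      "count",
		Arguments: []ast.Expression{&ast.Identifier{Parts: []string{"x"}}},
		Filter:    &ast.Identifier{Parts: []string{"is_valid"}}, // condition node, kept simple here
	}
	fmt.Printf("%s has filter: %v\n", call.Name, call.Filter != nil)

	// SELECT x FROM t LIMIT 2, 3 BY x: both the offset (2) and the count (3)
	// attach to the LIMIT BY clause; the top-level Limit/Offset stay unset.
	sel := &ast.SelectQuery{
		Columns:       []ast.Expression{&ast.Identifier{Parts: []string{"x"}}},
		LimitByOffset: &ast.Literal{Type: ast.LiteralInteger, Value: uint64(2)},
		LimitByLimit:  &ast.Literal{Type: ast.LiteralInteger, Value: uint64(3)},
		LimitBy:       []ast.Expression{&ast.Identifier{Parts: []string{"x"}}},
	}
	fmt.Printf("limit-by offset set: %v, top-level limit set: %v\n",
		sel.LimitByOffset != nil, sel.Limit != nil)
}
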
diff --git a/internal/explain/dictionary.go b/internal/explain/dictionary.go index 15f5753315..99eed8333c 100644 --- a/internal/explain/dictionary.go +++ b/internal/explain/dictionary.go @@ -80,16 +80,16 @@ func explainDictionaryDefinition(sb *strings.Builder, n *ast.DictionaryDefinitio explainDictionaryLifetime(sb, n.Lifetime, indent+" ", depth+1) } - // RANGE (if present, comes before LAYOUT) - if n.Range != nil { - explainDictionaryRange(sb, n.Range, indent+" ", depth+1) - } - - // LAYOUT + // LAYOUT (comes before RANGE in EXPLAIN output) if n.Layout != nil { explainDictionaryLayout(sb, n.Layout, indent+" ", depth+1) } + // RANGE + if n.Range != nil { + explainDictionaryRange(sb, n.Range, indent+" ", depth+1) + } + // SETTINGS if len(n.Settings) > 0 { fmt.Fprintf(sb, "%s Set\n", indent) diff --git a/internal/explain/explain.go b/internal/explain/explain.go index c0c85444c5..947ec23f84 100644 --- a/internal/explain/explain.go +++ b/internal/explain/explain.go @@ -155,6 +155,12 @@ func Node(sb *strings.Builder, node interface{}, depth int) { fmt.Fprintf(sb, "%sCreateSettingsProfileQuery\n", indent) case *ast.DropSettingsProfileQuery: fmt.Fprintf(sb, "%sDROP SETTINGS PROFILE query\n", indent) + case *ast.CreateNamedCollectionQuery: + fmt.Fprintf(sb, "%sCreateNamedCollectionQuery\n", indent) + case *ast.AlterNamedCollectionQuery: + fmt.Fprintf(sb, "%sAlterNamedCollectionQuery\n", indent) + case *ast.DropNamedCollectionQuery: + fmt.Fprintf(sb, "%sDropNamedCollectionQuery\n", indent) case *ast.ShowCreateSettingsProfileQuery: // Use PROFILES (plural) when multiple profiles are specified queryName := "SHOW CREATE SETTINGS PROFILE query" @@ -334,6 +340,9 @@ func Column(sb *strings.Builder, col *ast.ColumnDeclaration, depth int) { if len(col.Settings) > 0 { children++ } + if col.Comment != "" { + children++ + } if children > 0 { fmt.Fprintf(sb, "%sColumnDeclaration %s (children %d)\n", indent, col.Name, children) } else { @@ -360,6 +369,9 @@ func Column(sb *strings.Builder, col *ast.ColumnDeclaration, depth int) { if len(col.Settings) > 0 { fmt.Fprintf(sb, "%s Set\n", indent) } + if col.Comment != "" { + fmt.Fprintf(sb, "%s Literal \\'%s\\'\n", indent, col.Comment) + } } // explainCodecExpr handles CODEC expressions in column declarations diff --git a/internal/explain/expressions.go b/internal/explain/expressions.go index 792646ed27..7cb700fdae 100644 --- a/internal/explain/expressions.go +++ b/internal/explain/expressions.go @@ -2,6 +2,7 @@ package explain import ( "fmt" + "strconv" "strings" "github.com/sqlc-dev/doubleclick/ast" @@ -402,8 +403,9 @@ func collectLogicalOperands(n *ast.BinaryExpr) []ast.Expression { func explainUnaryExpr(sb *strings.Builder, n *ast.UnaryExpr, indent string, depth int) { // Handle negate of literal numbers - output as negative literal instead of function + // BUT only if the literal is NOT parenthesized (e.g., -1 folds, but -(1) stays as negate function) if n.Op == "-" { - if lit, ok := n.Operand.(*ast.Literal); ok { + if lit, ok := n.Operand.(*ast.Literal); ok && !lit.Parenthesized { switch lit.Type { case ast.LiteralInteger: // Convert positive integer to negative @@ -433,6 +435,19 @@ func explainUnaryExpr(sb *strings.Builder, n *ast.UnaryExpr, indent string, dept s := FormatFloat(-val) fmt.Fprintf(sb, "%sLiteral Float64_%s\n", indent, s) return + case ast.LiteralString: + // Handle BigInt - very large numbers stored as strings + // ClickHouse converts these to Float64 in scientific notation + if lit.IsBigInt { + if strVal, ok := lit.Value.(string); ok { + // 
Parse the string as float64 and negate it + if f, err := strconv.ParseFloat(strVal, 64); err == nil { + s := FormatFloat(-f) + fmt.Fprintf(sb, "%sLiteral Float64_%s\n", indent, s) + return + } + } + } } } } @@ -477,8 +492,13 @@ func explainAliasedExpr(sb *strings.Builder, n *ast.AliasedExpr, depth int) { needsFunctionFormat = true break } - // Also check if nested arrays/tuples contain non-literal elements + // Check if tuple contains array literals - these need Function tuple format if lit, ok := expr.(*ast.Literal); ok { + if lit.Type == ast.LiteralArray { + needsFunctionFormat = true + break + } + // Also check if nested arrays/tuples contain non-literal elements if containsNonLiteralInNested(lit) { needsFunctionFormat = true break diff --git a/internal/explain/format.go b/internal/explain/format.go index f0a48c5bba..4c266ed03c 100644 --- a/internal/explain/format.go +++ b/internal/explain/format.go @@ -160,9 +160,22 @@ func formatArrayLiteral(val interface{}) string { if lit.Type == ast.LiteralInteger { switch val := lit.Value.(type) { case int64: - parts = append(parts, fmt.Sprintf("Int64_%d", -val)) + negVal := -val + // ClickHouse normalizes -0 to UInt64_0 + if negVal == 0 { + parts = append(parts, "UInt64_0") + } else if negVal > 0 { + parts = append(parts, fmt.Sprintf("UInt64_%d", negVal)) + } else { + parts = append(parts, fmt.Sprintf("Int64_%d", negVal)) + } case uint64: - parts = append(parts, fmt.Sprintf("Int64_-%d", val)) + // ClickHouse normalizes -0 to UInt64_0 + if val == 0 { + parts = append(parts, "UInt64_0") + } else { + parts = append(parts, fmt.Sprintf("Int64_-%d", val)) + } default: parts = append(parts, fmt.Sprintf("Int64_-%v", lit.Value)) } @@ -195,8 +208,19 @@ func formatNumericExpr(e ast.Expression) (string, bool) { if lit, ok := unary.Operand.(*ast.Literal); ok { switch val := lit.Value.(type) { case int64: - return fmt.Sprintf("Int64_%d", -val), true + negVal := -val + // ClickHouse normalizes -0 to UInt64_0 + if negVal == 0 { + return "UInt64_0", true + } else if negVal > 0 { + return fmt.Sprintf("UInt64_%d", negVal), true + } + return fmt.Sprintf("Int64_%d", negVal), true case uint64: + // ClickHouse normalizes -0 to UInt64_0 + if val == 0 { + return "UInt64_0", true + } return fmt.Sprintf("Int64_%d", -int64(val)), true case float64: return fmt.Sprintf("Float64_%s", FormatFloat(-val)), true @@ -289,6 +313,13 @@ func FormatDataType(dt *ast.DataType) string { } else if ident, ok := p.(*ast.Identifier); ok { // Identifier (e.g., function name in AggregateFunction types) params = append(params, ident.Name()) + } else if unary, ok := p.(*ast.UnaryExpr); ok { + // Unary expression (e.g., -1 for negative numbers) + if lit, ok := unary.Operand.(*ast.Literal); ok { + params = append(params, fmt.Sprintf("%s%v", unary.Op, lit.Value)) + } else { + params = append(params, fmt.Sprintf("%v", p)) + } } else { params = append(params, fmt.Sprintf("%v", p)) } @@ -469,7 +500,7 @@ func formatExprAsString(expr ast.Expression) string { case ast.LiteralNull: return "NULL" case ast.LiteralArray: - return formatArrayAsString(e.Value) + return formatArrayAsStringFromLiteral(e) case ast.LiteralTuple: return formatTupleAsString(e.Value) default: @@ -519,6 +550,28 @@ func formatExprAsString(expr ast.Expression) string { } } +// formatArrayAsStringFromLiteral formats an array literal as a string for :: cast syntax +// It preserves original spacing from the source +func formatArrayAsStringFromLiteral(lit *ast.Literal) string { + exprs, ok := lit.Value.([]ast.Expression) + if !ok { + return 
"[]" + } + var parts []string + for _, e := range exprs { + parts = append(parts, formatElementAsString(e)) + } + separator := "," + if lit.SpacedCommas { + separator = ", " + } + // Use outer spaces when source had whitespace after [ (e.g., for multi-line arrays) + if lit.SpacedBrackets { + return "[ " + strings.Join(parts, separator) + " ]" + } + return "[" + strings.Join(parts, separator) + "]" +} + // formatArrayAsString formats an array literal as a string for :: cast syntax func formatArrayAsString(val interface{}) string { exprs, ok := val.([]ast.Expression) @@ -555,9 +608,14 @@ func formatElementAsString(expr ast.Expression) string { case ast.LiteralFloat: return fmt.Sprintf("%v", e.Value) case ast.LiteralString: + s := e.Value.(string) + // Check if this is a big integer stored as string (too large for int64/uint64) + // These should NOT be quoted when formatted in arrays + if e.IsBigInt { + return s + } // Quote strings with single quotes, triple-escape for nested context // Expected output format is \\\' (three backslashes + quote) - s := e.Value.(string) // Triple-escape single quotes for nested string literal context s = strings.ReplaceAll(s, "'", "\\\\\\'") return "\\\\\\'" + s + "\\\\\\'" @@ -569,7 +627,7 @@ func formatElementAsString(expr ast.Expression) string { case ast.LiteralNull: return "NULL" case ast.LiteralArray: - return formatArrayAsString(e.Value) + return formatArrayAsStringFromLiteral(e) case ast.LiteralTuple: return formatTupleAsString(e.Value) default: diff --git a/internal/explain/functions.go b/internal/explain/functions.go index 16d5f93c35..dba21ee756 100644 --- a/internal/explain/functions.go +++ b/internal/explain/functions.go @@ -115,13 +115,35 @@ func explainFunctionCallWithAlias(sb *strings.Builder, n *ast.FunctionCall, alia if n.Distinct { fnName = fnName + "Distinct" } + // Append "If" if the function has a FILTER clause + if n.Filter != nil { + fnName = fnName + "If" + } if alias != "" { fmt.Fprintf(sb, "%sFunction %s (alias %s) (children %d)\n", indent, fnName, alias, children) } else { fmt.Fprintf(sb, "%sFunction %s (children %d)\n", indent, fnName, children) } // Arguments (Settings are included as part of argument count) - argCount := len(n.Arguments) + // FILTER condition is appended to arguments for -If suffix functions + // count(name) FILTER (WHERE cond) -> countIf(name, cond) - 2 args + // count(*) FILTER (WHERE cond) -> countIf(cond) - 1 arg (asterisk dropped) + var argCount int + filterArgs := n.Arguments + if n.Filter != nil { + // Filter condition is appended as an extra argument + // But first, remove any Asterisk arguments (count(*) case) + var nonAsteriskArgs []ast.Expression + for _, arg := range n.Arguments { + if _, isAsterisk := arg.(*ast.Asterisk); !isAsterisk { + nonAsteriskArgs = append(nonAsteriskArgs, arg) + } + } + filterArgs = nonAsteriskArgs + argCount = len(filterArgs) + 1 // +1 for filter condition + } else { + argCount = len(n.Arguments) + } if len(n.Settings) > 0 { argCount++ // Set is counted as one argument } @@ -130,7 +152,12 @@ func explainFunctionCallWithAlias(sb *strings.Builder, n *ast.FunctionCall, alia fmt.Fprintf(sb, " (children %d)", argCount) } fmt.Fprintln(sb) - for _, arg := range n.Arguments { + // Output arguments (filterArgs excludes Asterisk when FILTER is present) + argsToOutput := filterArgs + if n.Filter == nil { + argsToOutput = n.Arguments + } + for _, arg := range argsToOutput { // For view() table function, unwrap Subquery wrapper // Also reset the subquery context since view() SELECT is not 
in a Subquery node if strings.ToLower(n.Name) == "view" { @@ -144,6 +171,10 @@ func explainFunctionCallWithAlias(sb *strings.Builder, n *ast.FunctionCall, alia } Node(sb, arg, depth+2) } + // Append filter condition at the end + if n.Filter != nil { + Node(sb, n.Filter, depth+2) + } // Settings appear as Set node inside ExpressionList if len(n.Settings) > 0 { fmt.Fprintf(sb, "%s Set\n", indent) @@ -567,8 +598,8 @@ func explainCastExprWithAlias(sb *strings.Builder, n *ast.CastExpr, alias string if lit.Type == ast.LiteralArray || lit.Type == ast.LiteralTuple { if useArrayFormat { fmt.Fprintf(sb, "%s Literal %s\n", indent, FormatLiteral(lit)) - } else if containsCastExpressions(lit) { - // Array contains CastExpr elements - output as Function array with children + } else if containsCastExpressions(lit) || !containsOnlyLiterals(lit) { + // Array contains CastExpr or non-literal elements - output as Function array with children Node(sb, n.Expr, depth+2) } else { // Simple literals (including negative numbers) - format as string @@ -738,6 +769,7 @@ func containsCastExpressions(lit *ast.Literal) bool { } // containsOnlyLiterals checks if a literal array/tuple contains only literal values (no expressions) +// This includes negated literals (UnaryExpr with Op="-" and Literal operand) func containsOnlyLiterals(lit *ast.Literal) bool { var exprs []ast.Expression switch lit.Type { @@ -752,16 +784,24 @@ func containsOnlyLiterals(lit *ast.Literal) bool { } for _, e := range exprs { - innerLit, ok := e.(*ast.Literal) - if !ok { - return false + // Check if it's a direct literal + if innerLit, ok := e.(*ast.Literal); ok { + // Nested arrays/tuples need recursive check + if innerLit.Type == ast.LiteralArray || innerLit.Type == ast.LiteralTuple { + if !containsOnlyLiterals(innerLit) { + return false + } + } + continue } - // Nested arrays/tuples need recursive check - if innerLit.Type == ast.LiteralArray || innerLit.Type == ast.LiteralTuple { - if !containsOnlyLiterals(innerLit) { - return false + // Check if it's a negated literal (e.g., -1) + if unary, ok := e.(*ast.UnaryExpr); ok && unary.Op == "-" { + if _, isLit := unary.Operand.(*ast.Literal); isLit { + continue } } + // Not a literal or negated literal + return false } return true } @@ -986,10 +1026,11 @@ func explainInExpr(sb *strings.Builder, n *ast.InExpr, indent string, depth int) // Check if this tuple contains only primitive literals (including unary negation) if !containsOnlyPrimitiveLiteralsWithUnary(lit) { allTuplesArePrimitive = false + allPrimitiveLiterals = false // Non-primitive tuple breaks the mixed literal check too } } - // Check if it's a primitive literal type (not a tuple or complex type) - if lit.Type == ast.LiteralTuple || lit.Type == ast.LiteralArray { + // Arrays break the primitive literals check + if lit.Type == ast.LiteralArray { allPrimitiveLiterals = false } } else if isNumericExpr(item) { @@ -1133,7 +1174,8 @@ func explainInExprWithAlias(sb *strings.Builder, n *ast.InExpr, alias string, in allBooleansOrNull := true allTuples := true allTuplesArePrimitive := true - hasNonNull := false // Need at least one non-null value + allPrimitiveLiterals := true // Any mix of primitive literals (numbers, strings, booleans, null, primitive tuples) + hasNonNull := false // Need at least one non-null value for _, item := range n.List { if lit, ok := item.(*ast.Literal); ok { if lit.Type == ast.LiteralNull { @@ -1155,6 +1197,7 @@ func explainInExprWithAlias(sb *strings.Builder, n *ast.InExpr, alias string, in } else { if 
!containsOnlyPrimitiveLiterals(lit) { allTuplesArePrimitive = false + allPrimitiveLiterals = false } } } else if isNumericExpr(item) { @@ -1167,10 +1210,11 @@ func explainInExprWithAlias(sb *strings.Builder, n *ast.InExpr, alias string, in allStringsOrNull = false allBooleansOrNull = false allTuples = false + allPrimitiveLiterals = false break } } - canBeTupleLiteral = hasNonNull && (allNumericOrNull || (allStringsOrNull && len(n.List) <= maxStringTupleSizeWithAlias) || allBooleansOrNull || (allTuples && allTuplesArePrimitive)) + canBeTupleLiteral = hasNonNull && (allNumericOrNull || (allStringsOrNull && len(n.List) <= maxStringTupleSizeWithAlias) || allBooleansOrNull || (allTuples && allTuplesArePrimitive) || allPrimitiveLiterals) } // Count arguments diff --git a/internal/explain/select.go b/internal/explain/select.go index 994947c136..9b5febb49f 100644 --- a/internal/explain/select.go +++ b/internal/explain/select.go @@ -61,7 +61,7 @@ func extractWithClause(stmt ast.Statement) []ast.Expression { } // explainSelectQueryWithInheritedWith outputs a SELECT with an inherited WITH clause -// The inherited WITH clause is output AFTER the columns (not before, like a regular WITH) +// The inherited WITH clause is output at the END of children (after columns and tables) func explainSelectQueryWithInheritedWith(sb *strings.Builder, stmt ast.Statement, inheritedWith []ast.Expression, depth int) { sq, ok := stmt.(*ast.SelectQuery) if !ok { @@ -76,23 +76,17 @@ func explainSelectQueryWithInheritedWith(sb *strings.Builder, stmt ast.Statement return } - // Output SelectQuery with inherited WITH clause after columns + // Output SelectQuery with inherited WITH clause at the end indent := strings.Repeat(" ", depth) children := countSelectQueryChildren(sq) + 1 // +1 for inherited WITH clause fmt.Fprintf(sb, "%sSelectQuery (children %d)\n", indent, children) - // Columns (ExpressionList) - output BEFORE inherited WITH + // Columns (ExpressionList) - output first fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(sq.Columns)) for _, col := range sq.Columns { Node(sb, col, depth+2) } - // Inherited WITH clause (ExpressionList) - output AFTER columns - fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(inheritedWith)) - for _, w := range inheritedWith { - Node(sb, w, depth+2) - } - // FROM (including ARRAY JOIN as part of TablesInSelectQuery) if sq.From != nil || sq.ArrayJoin != nil { TablesWithArrayJoin(sb, sq.From, sq.ArrayJoin, depth+1) @@ -145,23 +139,31 @@ func explainSelectQueryWithInheritedWith(sb *strings.Builder, stmt ast.Statement Node(sb, i, depth+2) } } - // OFFSET - if sq.Offset != nil { - Node(sb, sq.Offset, depth+1) - } - // LIMIT BY handling + // LIMIT BY handling - order: LimitByOffset, LimitByLimit, LimitBy expressions, Offset, Limit if sq.LimitByLimit != nil { + // Output LIMIT BY offset first (if present) + if sq.LimitByOffset != nil { + Node(sb, sq.LimitByOffset, depth+1) + } + // Output LIMIT BY count Node(sb, sq.LimitByLimit, depth+1) + // Output LIMIT BY expressions if len(sq.LimitBy) > 0 { fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(sq.LimitBy)) for _, expr := range sq.LimitBy { Node(sb, expr, depth+2) } } + // Output regular OFFSET + if sq.Offset != nil { + Node(sb, sq.Offset, depth+1) + } + // Output regular LIMIT if sq.Limit != nil { Node(sb, sq.Limit, depth+1) } } else if len(sq.LimitBy) > 0 { + // LIMIT BY without explicit LimitByLimit if sq.Limit != nil { Node(sb, sq.Limit, depth+1) } @@ -169,8 +171,14 @@ func 
explainSelectQueryWithInheritedWith(sb *strings.Builder, stmt ast.Statement for _, expr := range sq.LimitBy { Node(sb, expr, depth+2) } - } else if sq.Limit != nil { - Node(sb, sq.Limit, depth+1) + } else { + // No LIMIT BY - just regular OFFSET and LIMIT + if sq.Offset != nil { + Node(sb, sq.Offset, depth+1) + } + if sq.Limit != nil { + Node(sb, sq.Limit, depth+1) + } } // SETTINGS (when no INTERPOLATE - the case with INTERPOLATE is handled above) if len(sq.Settings) > 0 && len(sq.Interpolate) == 0 && !sq.SettingsAfterFormat { @@ -180,6 +188,105 @@ func explainSelectQueryWithInheritedWith(sb *strings.Builder, stmt ast.Statement if sq.Top != nil { Node(sb, sq.Top, depth+1) } + + // Inherited WITH clause (ExpressionList) - output at the END + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(inheritedWith)) + for _, w := range inheritedWith { + Node(sb, w, depth+2) + } +} + +// ExplainSelectWithInheritedWith recursively explains a select statement with inherited WITH clause +// This is used for WITH ... INSERT ... SELECT where the WITH clause belongs to the INSERT +// but needs to be output at the end of each SelectQuery in the tree +func ExplainSelectWithInheritedWith(sb *strings.Builder, stmt ast.Statement, inheritedWith []ast.Expression, depth int) { + switch s := stmt.(type) { + case *ast.SelectWithUnionQuery: + explainSelectWithUnionQueryWithInheritedWith(sb, s, inheritedWith, depth) + case *ast.SelectIntersectExceptQuery: + explainSelectIntersectExceptQueryWithInheritedWith(sb, s, inheritedWith, depth) + case *ast.SelectQuery: + explainSelectQueryWithInheritedWith(sb, s, inheritedWith, depth) + default: + Node(sb, stmt, depth) + } +} + +// explainSelectWithUnionQueryWithInheritedWith explains a SelectWithUnionQuery with inherited WITH +func explainSelectWithUnionQueryWithInheritedWith(sb *strings.Builder, n *ast.SelectWithUnionQuery, inheritedWith []ast.Expression, depth int) { + if n == nil { + return + } + indent := strings.Repeat(" ", depth) + children := countSelectUnionChildren(n) + fmt.Fprintf(sb, "%sSelectWithUnionQuery (children %d)\n", indent, children) + + selects := simplifyUnionSelects(n.Selects) + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(selects)) + for _, sel := range selects { + ExplainSelectWithInheritedWith(sb, sel, inheritedWith, depth+2) + } + + // INTO OUTFILE clause + for _, sel := range n.Selects { + if sq, ok := sel.(*ast.SelectQuery); ok && sq.IntoOutfile != nil { + fmt.Fprintf(sb, "%s Literal \\'%s\\'\n", indent, sq.IntoOutfile.Filename) + break + } + } + // SETTINGS before FORMAT + if n.SettingsBeforeFormat && len(n.Settings) > 0 { + fmt.Fprintf(sb, "%s Set\n", indent) + } + // FORMAT clause - check individual SelectQuery nodes + for _, sel := range n.Selects { + if sq, ok := sel.(*ast.SelectQuery); ok && sq.Format != nil { + Node(sb, sq.Format, depth+1) + break + } + } + // SETTINGS after FORMAT + if n.SettingsAfterFormat && len(n.Settings) > 0 { + fmt.Fprintf(sb, "%s Set\n", indent) + } else { + for _, sel := range n.Selects { + if sq, ok := sel.(*ast.SelectQuery); ok && sq.SettingsAfterFormat && len(sq.Settings) > 0 { + fmt.Fprintf(sb, "%s Set\n", indent) + break + } + } + } +} + +// explainSelectIntersectExceptQueryWithInheritedWith explains a SelectIntersectExceptQuery with inherited WITH +func explainSelectIntersectExceptQueryWithInheritedWith(sb *strings.Builder, n *ast.SelectIntersectExceptQuery, inheritedWith []ast.Expression, depth int) { + indent := strings.Repeat(" ", depth) + fmt.Fprintf(sb, 
"%sSelectIntersectExceptQuery (children %d)\n", indent, len(n.Selects)) + + // Check if EXCEPT is present - affects how first operand is wrapped + hasExcept := false + for _, op := range n.Operators { + if strings.HasPrefix(op, "EXCEPT") { + hasExcept = true + break + } + } + + for i, sel := range n.Selects { + if hasExcept && i == 0 { + // Wrap first operand in SelectWithUnionQuery format + if _, isUnion := sel.(*ast.SelectWithUnionQuery); isUnion { + ExplainSelectWithInheritedWith(sb, sel, inheritedWith, depth+1) + } else { + childIndent := strings.Repeat(" ", depth+1) + fmt.Fprintf(sb, "%sSelectWithUnionQuery (children 1)\n", childIndent) + fmt.Fprintf(sb, "%s ExpressionList (children 1)\n", childIndent) + ExplainSelectWithInheritedWith(sb, sel, inheritedWith, depth+3) + } + } else { + ExplainSelectWithInheritedWith(sb, sel, inheritedWith, depth+1) + } + } } func explainSelectWithUnionQuery(sb *strings.Builder, n *ast.SelectWithUnionQuery, indent string, depth int) { @@ -267,9 +374,14 @@ func explainSelectQuery(sb *strings.Builder, n *ast.SelectQuery, indent string, // but we need to unwrap tuples and output elements directly if lit, ok := g.(*ast.Literal); ok && lit.Type == ast.LiteralTuple { if elements, ok := lit.Value.([]ast.Expression); ok { - fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(elements)) - for _, elem := range elements { - Node(sb, elem, depth+3) + if len(elements) == 0 { + // Empty grouping set () outputs ExpressionList without children count + fmt.Fprintf(sb, "%s ExpressionList\n", indent) + } else { + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(elements)) + for _, elem := range elements { + Node(sb, elem, depth+3) + } } } else { // Fallback for unexpected tuple value type @@ -319,25 +431,31 @@ func explainSelectQuery(sb *strings.Builder, n *ast.SelectQuery, indent string, Node(sb, i, depth+2) } } - // OFFSET (ClickHouse outputs offset before limit in EXPLAIN AST) - if n.Offset != nil { - Node(sb, n.Offset, depth+1) - } - // LIMIT BY handling + // LIMIT BY handling - order: LimitByOffset, LimitByLimit, LimitBy expressions, Offset, Limit if n.LimitByLimit != nil { - // Case: LIMIT n BY x LIMIT m -> output LimitByLimit, LimitBy, Limit + // Output LIMIT BY offset first (if present) + if n.LimitByOffset != nil { + Node(sb, n.LimitByOffset, depth+1) + } + // Output LIMIT BY count Node(sb, n.LimitByLimit, depth+1) + // Output LIMIT BY expressions if len(n.LimitBy) > 0 { fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(n.LimitBy)) for _, expr := range n.LimitBy { Node(sb, expr, depth+2) } } + // Output regular OFFSET + if n.Offset != nil { + Node(sb, n.Offset, depth+1) + } + // Output regular LIMIT if n.Limit != nil { Node(sb, n.Limit, depth+1) } } else if len(n.LimitBy) > 0 { - // Case: LIMIT n BY x (no second LIMIT) -> output Limit, then LimitBy + // LIMIT BY without explicit LimitByLimit if n.Limit != nil { Node(sb, n.Limit, depth+1) } @@ -345,9 +463,14 @@ func explainSelectQuery(sb *strings.Builder, n *ast.SelectQuery, indent string, for _, expr := range n.LimitBy { Node(sb, expr, depth+2) } - } else if n.Limit != nil { - // Case: plain LIMIT n (no BY) - Node(sb, n.Limit, depth+1) + } else { + // No LIMIT BY - just regular OFFSET and LIMIT + if n.Offset != nil { + Node(sb, n.Offset, depth+1) + } + if n.Limit != nil { + Node(sb, n.Limit, depth+1) + } } // SETTINGS is output at SelectQuery level only when NOT after FORMAT // When SettingsAfterFormat is true, it's output at SelectWithUnionQuery level instead @@ -519,8 
+642,11 @@ func countSelectQueryChildren(n *ast.SelectQuery) int { if len(n.Interpolate) > 0 { count++ } + if n.LimitByOffset != nil { + count++ // LIMIT offset in "LIMIT offset, count BY x" + } if n.LimitByLimit != nil { - count++ // LIMIT n in "LIMIT n BY x LIMIT m" + count++ // LIMIT count in "LIMIT n BY x LIMIT m" } if n.Limit != nil { count++ diff --git a/internal/explain/statements.go b/internal/explain/statements.go index f347a3afff..c9010bb659 100644 --- a/internal/explain/statements.go +++ b/internal/explain/statements.go @@ -24,7 +24,7 @@ func explainInsertQuery(sb *strings.Builder, n *ast.InsertQuery, indent string, children++ // Database identifier (separate from table) } } - if len(n.Columns) > 0 || n.AllColumns { + if len(n.ColumnExpressions) > 0 || len(n.Columns) > 0 || n.AllColumns { children++ // Column list } if n.Select != nil { @@ -70,7 +70,12 @@ func explainInsertQuery(sb *strings.Builder, n *ast.InsertQuery, indent string, } // Column list - if n.AllColumns { + if len(n.ColumnExpressions) > 0 { + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(n.ColumnExpressions)) + for _, expr := range n.ColumnExpressions { + Node(sb, expr, depth+2) + } + } else if n.AllColumns { fmt.Fprintf(sb, "%s ExpressionList (children 1)\n", indent) fmt.Fprintf(sb, "%s Asterisk\n", indent) } else if len(n.Columns) > 0 { @@ -92,7 +97,13 @@ func explainInsertQuery(sb *strings.Builder, n *ast.InsertQuery, indent string, } } } - Node(sb, n.Select, depth+1) + // If this INSERT has an inherited WITH clause (from WITH ... INSERT syntax), + // use the special explain function that outputs WITH at the end of each SelectQuery + if len(n.With) > 0 { + ExplainSelectWithInheritedWith(sb, n.Select, n.With, depth+1) + } else { + Node(sb, n.Select, depth+1) + } } if n.HasSettings { @@ -117,16 +128,26 @@ func explainCreateQuery(sb *strings.Builder, n *ast.CreateQuery, indent string, } if n.CreateUser || n.AlterUser { if n.HasAuthenticationData { - fmt.Fprintf(sb, "%sCreateUserQuery (children 1)\n", indent) - // AuthenticationData has children if there are auth values + // Each authentication value is a separate AuthenticationData child if len(n.AuthenticationValues) > 0 { - fmt.Fprintf(sb, "%s AuthenticationData (children %d)\n", indent, len(n.AuthenticationValues)) + fmt.Fprintf(sb, "%sCreateUserQuery (children %d)\n", indent, len(n.AuthenticationValues)) for _, val := range n.AuthenticationValues { + // Each AuthenticationData has 1 child (the Literal value) + fmt.Fprintf(sb, "%s AuthenticationData (children 1)\n", indent) // Escape the value - strings need \' escaping escaped := escapeStringLiteral(val) fmt.Fprintf(sb, "%s Literal \\'%s\\'\n", indent, escaped) } + } else if n.SSHKeyCount > 0 { + // SSH key authentication - each key is a PublicSSHKey child + fmt.Fprintf(sb, "%sCreateUserQuery (children 1)\n", indent) + fmt.Fprintf(sb, "%s AuthenticationData (children %d)\n", indent, n.SSHKeyCount) + for i := 0; i < n.SSHKeyCount; i++ { + fmt.Fprintf(sb, "%s PublicSSHKey\n", indent) + } } else { + // No values - just output CreateUserQuery with 1 child + fmt.Fprintf(sb, "%sCreateUserQuery (children 1)\n", indent) fmt.Fprintf(sb, "%s AuthenticationData\n", indent) } } else { @@ -389,12 +410,13 @@ func explainCreateQuery(sb *strings.Builder, n *ast.CreateQuery, indent string, Node(sb, n.PartitionBy, storageChildDepth) } } - if len(n.OrderBy) > 0 { - if len(n.OrderBy) == 1 { - if ident, ok := n.OrderBy[0].(*ast.Identifier); ok { + // PRIMARY KEY comes before ORDER BY in EXPLAIN output + if 
len(n.PrimaryKey) > 0 { + if len(n.PrimaryKey) == 1 { + if ident, ok := n.PrimaryKey[0].(*ast.Identifier); ok { fmt.Fprintf(sb, "%s Identifier %s\n", storageIndent, ident.Name()) - } else if lit, ok := n.OrderBy[0].(*ast.Literal); ok && lit.Type == ast.LiteralTuple { - // Handle tuple literal (including empty tuple from ORDER BY ()) + } else if lit, ok := n.PrimaryKey[0].(*ast.Literal); ok && lit.Type == ast.LiteralTuple { + // Handle tuple literal (including empty tuple from PRIMARY KEY ()) exprs, _ := lit.Value.([]ast.Expression) fmt.Fprintf(sb, "%s Function tuple (children %d)\n", storageIndent, 1) if len(exprs) > 0 { @@ -406,40 +428,48 @@ func explainCreateQuery(sb *strings.Builder, n *ast.CreateQuery, indent string, fmt.Fprintf(sb, "%s ExpressionList\n", storageIndent) } } else { - Node(sb, n.OrderBy[0], storageChildDepth) + Node(sb, n.PrimaryKey[0], storageChildDepth) } } else { fmt.Fprintf(sb, "%s Function tuple (children %d)\n", storageIndent, 1) - fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", storageIndent, len(n.OrderBy)) - for _, o := range n.OrderBy { - Node(sb, o, storageChildDepth+2) + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", storageIndent, len(n.PrimaryKey)) + for _, p := range n.PrimaryKey { + Node(sb, p, storageChildDepth+2) } } } - if len(n.PrimaryKey) > 0 { - if len(n.PrimaryKey) == 1 { - if ident, ok := n.PrimaryKey[0].(*ast.Identifier); ok { + // ORDER BY comes after PRIMARY KEY in EXPLAIN output + if len(n.OrderBy) > 0 { + if len(n.OrderBy) == 1 { + if ident, ok := n.OrderBy[0].(*ast.Identifier); ok { fmt.Fprintf(sb, "%s Identifier %s\n", storageIndent, ident.Name()) - } else if lit, ok := n.PrimaryKey[0].(*ast.Literal); ok && lit.Type == ast.LiteralTuple { - // Handle tuple literal (including empty tuple from PRIMARY KEY ()) - exprs, _ := lit.Value.([]ast.Expression) - fmt.Fprintf(sb, "%s Function tuple (children %d)\n", storageIndent, 1) - if len(exprs) > 0 { - fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", storageIndent, len(exprs)) - for _, e := range exprs { - Node(sb, e, storageChildDepth+2) - } + } else if lit, ok := n.OrderBy[0].(*ast.Literal); ok && lit.Type == ast.LiteralTuple { + // Handle tuple literal - for ORDER BY with modifiers (DESC/ASC), + // ClickHouse outputs just "Function tuple" without children + // For empty tuples or regular tuples without modifiers, output children + if n.OrderByHasModifiers { + fmt.Fprintf(sb, "%s Function tuple\n", storageIndent) } else { - fmt.Fprintf(sb, "%s ExpressionList\n", storageIndent) + exprs, _ := lit.Value.([]ast.Expression) + fmt.Fprintf(sb, "%s Function tuple (children %d)\n", storageIndent, 1) + if len(exprs) > 0 { + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", storageIndent, len(exprs)) + for _, e := range exprs { + Node(sb, e, storageChildDepth+2) + } + } else { + fmt.Fprintf(sb, "%s ExpressionList\n", storageIndent) + } } } else { - Node(sb, n.PrimaryKey[0], storageChildDepth) + Node(sb, n.OrderBy[0], storageChildDepth) } } else { + // Multiple ORDER BY expressions without modifiers fmt.Fprintf(sb, "%s Function tuple (children %d)\n", storageIndent, 1) - fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", storageIndent, len(n.PrimaryKey)) - for _, p := range n.PrimaryKey { - Node(sb, p, storageChildDepth+2) + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", storageIndent, len(n.OrderBy)) + for _, o := range n.OrderBy { + Node(sb, o, storageChildDepth+2) } } } @@ -448,9 +478,15 @@ func explainCreateQuery(sb *strings.Builder, n *ast.CreateQuery, indent string, Node(sb, 
n.SampleBy, storageChildDepth) } if n.TTL != nil { - fmt.Fprintf(sb, "%s ExpressionList (children 1)\n", storageIndent) + // Count total TTL elements (1 for Expression + len(Expressions)) + ttlCount := 1 + len(n.TTL.Expressions) + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", storageIndent, ttlCount) fmt.Fprintf(sb, "%s TTLElement (children 1)\n", storageIndent) Node(sb, n.TTL.Expression, storageChildDepth+2) + for _, expr := range n.TTL.Expressions { + fmt.Fprintf(sb, "%s TTLElement (children 1)\n", storageIndent) + Node(sb, expr, storageChildDepth+2) + } } if len(n.Settings) > 0 { fmt.Fprintf(sb, "%s Set\n", storageIndent) @@ -588,13 +624,19 @@ func explainDropQuery(sb *strings.Builder, n *ast.DropQuery, indent string, dept } else { children := 1 if hasFormat { - children = 2 + children++ + } + if len(n.Settings) > 0 { + children++ } fmt.Fprintf(sb, "%sDropQuery %s (children %d)\n", indent, EscapeIdentifier(name), children) fmt.Fprintf(sb, "%s Identifier %s\n", indent, EscapeIdentifier(name)) if hasFormat { fmt.Fprintf(sb, "%s Identifier %s\n", indent, n.Format) } + if len(n.Settings) > 0 { + fmt.Fprintf(sb, "%s Set\n", indent) + } } } @@ -602,14 +644,29 @@ func explainUndropQuery(sb *strings.Builder, n *ast.UndropQuery, indent string, name := n.Table // Check if we have a database-qualified name (for UNDROP TABLE db.table) hasDatabase := n.Database != "" + hasFormat := n.Format != "" if hasDatabase { - // Database-qualified: UndropQuery db table (children 2) - fmt.Fprintf(sb, "%sUndropQuery %s %s (children %d)\n", indent, EscapeIdentifier(n.Database), EscapeIdentifier(name), 2) + // Database-qualified: UndropQuery db table (children 2 or 3) + children := 2 + if hasFormat { + children = 3 + } + fmt.Fprintf(sb, "%sUndropQuery %s %s (children %d)\n", indent, EscapeIdentifier(n.Database), EscapeIdentifier(name), children) fmt.Fprintf(sb, "%s Identifier %s\n", indent, EscapeIdentifier(n.Database)) fmt.Fprintf(sb, "%s Identifier %s\n", indent, EscapeIdentifier(name)) + if hasFormat { + fmt.Fprintf(sb, "%s Identifier %s\n", indent, n.Format) + } } else { - fmt.Fprintf(sb, "%sUndropQuery %s (children %d)\n", indent, EscapeIdentifier(name), 1) + children := 1 + if hasFormat { + children = 2 + } + fmt.Fprintf(sb, "%sUndropQuery %s (children %d)\n", indent, EscapeIdentifier(name), children) fmt.Fprintf(sb, "%s Identifier %s\n", indent, EscapeIdentifier(name)) + if hasFormat { + fmt.Fprintf(sb, "%s Identifier %s\n", indent, n.Format) + } } } @@ -749,25 +806,44 @@ func explainExplainQuery(sb *strings.Builder, n *ast.ExplainQuery, indent string } // Check if inner statement has FORMAT clause - this should be output as child of Explain + // Also check for SETTINGS after FORMAT (these are at the EXPLAIN level, not part of the SELECT) var format *ast.Identifier + var hasSettingsAfterFormat bool + var savedSettings []*ast.SettingExpr if swu, ok := n.Statement.(*ast.SelectWithUnionQuery); ok { + // Check for union-level settings after format + if swu.SettingsAfterFormat && len(swu.Settings) > 0 { + hasSettingsAfterFormat = true + savedSettings = swu.Settings + swu.Settings = nil + defer func() { swu.Settings = savedSettings }() + } for _, sel := range swu.Selects { - if sq, ok := sel.(*ast.SelectQuery); ok && sq.Format != nil { - format = sq.Format - // Temporarily nil out the format so it's not output by SelectWithUnionQuery - sq.Format = nil - defer func() { sq.Format = format }() + if sq, ok := sel.(*ast.SelectQuery); ok { + if sq.Format != nil { + format = sq.Format + // Temporarily nil out the 
format so it's not output by SelectWithUnionQuery + sq.Format = nil + defer func() { sq.Format = format }() + } + // Check for settings after format in the SelectQuery + if sq.SettingsAfterFormat && len(sq.Settings) > 0 && !hasSettingsAfterFormat { + hasSettingsAfterFormat = true + savedSettings = sq.Settings + sq.Settings = nil + defer func() { sq.Settings = savedSettings }() + } break } } } - // Count children: settings (if present) + statement + format (if present) + // Count children: statement + format (if present) + settings (if present) children := 1 - if n.HasSettings { + if format != nil { children++ } - if format != nil { + if n.HasSettings || hasSettingsAfterFormat { children++ } @@ -778,13 +854,20 @@ func explainExplainQuery(sb *strings.Builder, n *ast.ExplainQuery, indent string } else { fmt.Fprintf(sb, "%sExplain%s (children %d)\n", indent, typeStr, children) } + // EXPLAIN-level settings (like header = 0) come BEFORE the statement if n.HasSettings { fmt.Fprintf(sb, "%s Set\n", indent) } + // Output the statement Node(sb, n.Statement, depth+1) + // Format comes after statement if format != nil { fmt.Fprintf(sb, "%s Identifier %s\n", indent, format.Parts[len(format.Parts)-1]) } + // Settings after format (at the query level, e.g., FORMAT Null SETTINGS ...) come last + if hasSettingsAfterFormat { + fmt.Fprintf(sb, "%s Set\n", indent) + } } func explainShowQuery(sb *strings.Builder, n *ast.ShowQuery, indent string) { @@ -955,11 +1038,15 @@ func explainShowQuery(sb *strings.Builder, n *ast.ShowQuery, indent string) { // SHOW CREATE USER has special output format if n.ShowType == ast.ShowCreateUser { + userWord := "USER" + if n.MultipleUsers { + userWord = "USERS" + } if n.Format != "" { - fmt.Fprintf(sb, "%sSHOW CREATE USER query (children 1)\n", indent) + fmt.Fprintf(sb, "%sSHOW CREATE %s query (children 1)\n", indent, userWord) fmt.Fprintf(sb, "%s Identifier %s\n", indent, n.Format) } else { - fmt.Fprintf(sb, "%sSHOW CREATE USER query\n", indent) + fmt.Fprintf(sb, "%sSHOW CREATE %s query\n", indent, userWord) } return } @@ -1191,7 +1278,7 @@ func explainDetachQuery(sb *strings.Builder, n *ast.DetachQuery, indent string) } func explainAttachQuery(sb *strings.Builder, n *ast.AttachQuery, indent string, depth int) { - // Count children: identifier + columns definition (if any) + storage definition (if any) + // Count children: identifier + columns definition (if any) + select query (if any) + storage/view targets (if any) children := 1 // table/database identifier if n.Database != "" && n.Table != "" { children++ // extra identifier for database @@ -1200,10 +1287,14 @@ func explainAttachQuery(sb *strings.Builder, n *ast.AttachQuery, indent string, if hasColumns { children++ } - hasStorage := n.Engine != nil || len(n.OrderBy) > 0 || len(n.PrimaryKey) > 0 - if hasStorage { + hasSelectQuery := n.SelectQuery != nil + if hasSelectQuery { children++ } + hasStorage := n.Engine != nil || len(n.OrderBy) > 0 || len(n.PrimaryKey) > 0 || n.PartitionBy != nil + if hasStorage { + children++ // ViewTargets or Storage definition + } // Output header if n.Database != "" && n.Table != "" { @@ -1259,30 +1350,64 @@ func explainAttachQuery(sb *strings.Builder, n *ast.AttachQuery, indent string, } } - // Output storage definition + // Output select query (for materialized views) + if hasSelectQuery { + Node(sb, n.SelectQuery, depth+1) + } + + // Output storage definition (or ViewTargets for materialized views) if hasStorage { storageChildren := 0 if n.Engine != nil { storageChildren++ } + if 
n.PartitionBy != nil { + storageChildren++ + } if len(n.OrderBy) > 0 { storageChildren++ } if len(n.PrimaryKey) > 0 { storageChildren++ } - fmt.Fprintf(sb, "%s Storage definition (children %d)\n", indent, storageChildren) - if n.Engine != nil { - fmt.Fprintf(sb, "%s Function %s\n", indent, n.Engine.Name) - } - if len(n.OrderBy) > 0 { - for _, expr := range n.OrderBy { - Node(sb, expr, depth+2) + + // For materialized views, wrap in ViewTargets + if n.IsMaterializedView { + fmt.Fprintf(sb, "%s ViewTargets (children 1)\n", indent) + fmt.Fprintf(sb, "%s Storage definition (children %d)\n", indent, storageChildren) + if n.Engine != nil { + fmt.Fprintf(sb, "%s Function %s\n", indent, n.Engine.Name) } - } - if len(n.PrimaryKey) > 0 { - for _, expr := range n.PrimaryKey { - Node(sb, expr, depth+2) + if n.PartitionBy != nil { + Node(sb, n.PartitionBy, depth+3) + } + if len(n.OrderBy) > 0 { + for _, expr := range n.OrderBy { + Node(sb, expr, depth+3) + } + } + if len(n.PrimaryKey) > 0 { + for _, expr := range n.PrimaryKey { + Node(sb, expr, depth+3) + } + } + } else { + fmt.Fprintf(sb, "%s Storage definition (children %d)\n", indent, storageChildren) + if n.Engine != nil { + fmt.Fprintf(sb, "%s Function %s\n", indent, n.Engine.Name) + } + if n.PartitionBy != nil { + Node(sb, n.PartitionBy, depth+2) + } + if len(n.OrderBy) > 0 { + for _, expr := range n.OrderBy { + Node(sb, expr, depth+2) + } + } + if len(n.PrimaryKey) > 0 { + for _, expr := range n.PrimaryKey { + Node(sb, expr, depth+2) + } } } } @@ -1350,6 +1475,10 @@ func explainAlterCommand(sb *strings.Builder, cmd *ast.AlterCommand, indent stri if cmdType == ast.AlterDeleteWhere { cmdType = "DELETE" } + // FREEZE (without partition) is shown as FREEZE_ALL in EXPLAIN AST + if cmdType == ast.AlterFreeze { + cmdType = "FREEZE_ALL" + } if children > 0 { fmt.Fprintf(sb, "%sAlterCommand %s (children %d)\n", indent, cmdType, children) } else { @@ -1398,8 +1527,13 @@ func explainAlterCommand(sb *strings.Builder, cmd *ast.AlterCommand, indent stri fmt.Fprintf(sb, "%s Identifier %s\n", indent, cmd.ColumnName) } if cmd.Partition != nil { - fmt.Fprintf(sb, "%s Partition (children 1)\n", indent) - Node(sb, cmd.Partition, depth+2) + // PARTITION ALL is shown as Partition_ID (empty) in EXPLAIN AST + if ident, ok := cmd.Partition.(*ast.Identifier); ok && strings.ToUpper(ident.Name()) == "ALL" { + fmt.Fprintf(sb, "%s Partition_ID \n", indent) + } else { + fmt.Fprintf(sb, "%s Partition (children 1)\n", indent) + Node(sb, cmd.Partition, depth+2) + } } case ast.AlterCommentColumn: if cmd.ColumnName != "" { @@ -1419,14 +1553,23 @@ func explainAlterCommand(sb *strings.Builder, cmd *ast.AlterCommand, indent stri } else if cmd.Index != "" { fmt.Fprintf(sb, "%s Identifier %s\n", indent, cmd.Index) } + // AFTER clause + if cmd.AfterIndex != "" { + fmt.Fprintf(sb, "%s Identifier %s\n", indent, cmd.AfterIndex) + } case ast.AlterDropIndex, ast.AlterClearIndex: if cmd.Index != "" { fmt.Fprintf(sb, "%s Identifier %s\n", indent, cmd.Index) } // CLEAR INDEX IN PARTITION clause if cmd.Partition != nil { - fmt.Fprintf(sb, "%s Partition (children 1)\n", indent) - Node(sb, cmd.Partition, depth+2) + // PARTITION ALL is shown as Partition_ID (empty) in EXPLAIN AST + if ident, ok := cmd.Partition.(*ast.Identifier); ok && strings.ToUpper(ident.Name()) == "ALL" { + fmt.Fprintf(sb, "%s Partition_ID \n", indent) + } else { + fmt.Fprintf(sb, "%s Partition (children 1)\n", indent) + Node(sb, cmd.Partition, depth+2) + } } case ast.AlterMaterializeIndex: if cmd.Index != "" { @@ -1466,9 
+1609,15 @@ func explainAlterCommand(sb *strings.Builder, cmd *ast.AlterCommand, indent stri case ast.AlterModifyTTL: if cmd.TTL != nil && cmd.TTL.Expression != nil { // TTL is wrapped in ExpressionList and TTLElement - fmt.Fprintf(sb, "%s ExpressionList (children 1)\n", indent) + // Count total TTL elements (1 for Expression + len(Expressions)) + ttlCount := 1 + len(cmd.TTL.Expressions) + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, ttlCount) fmt.Fprintf(sb, "%s TTLElement (children 1)\n", indent) Node(sb, cmd.TTL.Expression, depth+3) + for _, expr := range cmd.TTL.Expressions { + fmt.Fprintf(sb, "%s TTLElement (children 1)\n", indent) + Node(sb, expr, depth+3) + } } case ast.AlterModifySetting: fmt.Fprintf(sb, "%s Set\n", indent) @@ -1502,6 +1651,16 @@ func explainAlterCommand(sb *strings.Builder, cmd *ast.AlterCommand, indent stri Node(sb, cmd.Where, depth+1) } case ast.AlterUpdate: + // Output order: Partition, Where, Assignments + if cmd.Partition != nil { + // PARTITION ALL is shown as Partition_ID (empty) in EXPLAIN AST + if ident, ok := cmd.Partition.(*ast.Identifier); ok && strings.ToUpper(ident.Name()) == "ALL" { + fmt.Fprintf(sb, "%s Partition_ID \n", indent) + } else { + fmt.Fprintf(sb, "%s Partition (children 1)\n", indent) + Node(sb, cmd.Partition, depth+2) + } + } if cmd.Where != nil { Node(sb, cmd.Where, depth+1) } @@ -1543,6 +1702,14 @@ func explainAlterCommand(sb *strings.Builder, cmd *ast.AlterCommand, indent stri if cmd.SampleByExpr != nil { Node(sb, cmd.SampleByExpr, depth+1) } + case ast.AlterResetSetting: + // RESET SETTING outputs ExpressionList with Identifier children + if len(cmd.ResetSettings) > 0 { + fmt.Fprintf(sb, "%s ExpressionList (children %d)\n", indent, len(cmd.ResetSettings)) + for _, name := range cmd.ResetSettings { + fmt.Fprintf(sb, "%s Identifier %s\n", indent, name) + } + } default: if cmd.Partition != nil { Node(sb, cmd.Partition, depth+1) @@ -1697,6 +1864,10 @@ func countAlterCommandChildren(cmd *ast.AlterCommand) int { } else if cmd.Index != "" { children++ } + // AFTER clause adds another child + if cmd.AfterIndex != "" { + children++ + } case ast.AlterDropIndex, ast.AlterClearIndex: if cmd.Index != "" { children++ @@ -1745,6 +1916,9 @@ func countAlterCommandChildren(cmd *ast.AlterCommand) int { children++ } case ast.AlterUpdate: + if cmd.Partition != nil { + children++ + } if len(cmd.Assignments) > 0 { children++ } @@ -1779,6 +1953,11 @@ func countAlterCommandChildren(cmd *ast.AlterCommand) int { if cmd.SampleByExpr != nil { children = 1 } + case ast.AlterResetSetting: + // RESET SETTING: ExpressionList with setting names (1 child) + if len(cmd.ResetSettings) > 0 { + children = 1 + } default: if cmd.Partition != nil { children++ @@ -1800,6 +1979,9 @@ func explainOptimizeQuery(sb *strings.Builder, n *ast.OptimizeQuery, indent stri if n.Cleanup { name += "_cleanup" } + if n.Dedupe { + name += "_deduplicate" + } hasSettings := len(n.Settings) > 0 children := 1 // identifier @@ -1820,8 +2002,13 @@ func explainOptimizeQuery(sb *strings.Builder, n *ast.OptimizeQuery, indent stri fmt.Fprintf(sb, "%sOptimizeQuery %s (children %d)\n", indent, name, children) } if n.Partition != nil { - fmt.Fprintf(sb, "%s Partition (children 1)\n", indent) - Node(sb, n.Partition, depth+2) + // PARTITION ALL is shown as Partition_ID (empty) in EXPLAIN AST + if ident, ok := n.Partition.(*ast.Identifier); ok && strings.ToUpper(ident.Name()) == "ALL" { + fmt.Fprintf(sb, "%s Partition_ID \n", indent) + } else { + fmt.Fprintf(sb, "%s Partition (children 
1)\n", indent) + Node(sb, n.Partition, depth+2) + } } if n.Database != "" { fmt.Fprintf(sb, "%s Identifier %s\n", indent, n.Database) @@ -1869,17 +2056,23 @@ func explainDeleteQuery(sb *strings.Builder, n *ast.DeleteQuery, indent string, return } - // Count children: Where expression + table identifier + // Count children: Where expression + table identifier + settings children := 1 // table identifier if n.Where != nil { children++ } + if len(n.Settings) > 0 { + children++ + } fmt.Fprintf(sb, "%sDeleteQuery %s (children %d)\n", indent, n.Table, children) if n.Where != nil { Node(sb, n.Where, depth+1) } fmt.Fprintf(sb, "%s Identifier %s\n", indent, n.Table) + if len(n.Settings) > 0 { + fmt.Fprintf(sb, "%s Set\n", indent) + } } func explainCheckQuery(sb *strings.Builder, n *ast.CheckQuery, indent string) { diff --git a/parser/expression.go b/parser/expression.go index f60bd09810..1aeac9e127 100644 --- a/parser/expression.go +++ b/parser/expression.go @@ -114,6 +114,50 @@ func (p *Parser) parseExpressionList() []ast.Expression { return exprs } +// parseCreateOrderByExpressions parses expressions for CREATE TABLE ORDER BY clause. +// Returns the expressions and a boolean indicating if any ASC/DESC modifier was found. +// This is different from regular expression list parsing because ORDER BY in CREATE TABLE +// can have ASC/DESC modifiers that affect the EXPLAIN output (should be Function tuple if any modifier). +func (p *Parser) parseCreateOrderByExpressions() ([]ast.Expression, bool) { + var exprs []ast.Expression + hasModifier := false + + if p.currentIs(token.RPAREN) || p.currentIs(token.EOF) { + return exprs, hasModifier + } + + expr := p.parseExpression(LOWEST) + if expr != nil { + exprs = append(exprs, expr) + } + // Consume ASC/DESC modifier + if p.currentIs(token.ASC) { + hasModifier = true + p.nextToken() + } else if p.currentIs(token.DESC) { + hasModifier = true + p.nextToken() + } + + for p.currentIs(token.COMMA) { + p.nextToken() + expr := p.parseExpression(LOWEST) + if expr != nil { + exprs = append(exprs, expr) + } + // Consume ASC/DESC modifier + if p.currentIs(token.ASC) { + hasModifier = true + p.nextToken() + } else if p.currentIs(token.DESC) { + hasModifier = true + p.nextToken() + } + } + + return exprs, hasModifier +} + // isClauseKeyword returns true if the current token is a SQL clause keyword // that should terminate an expression list (used for trailing comma support) func (p *Parser) isClauseKeyword() bool { @@ -397,7 +441,11 @@ func (p *Parser) parsePrefixExpression() ast.Expression { case token.TRIM: return p.parseTrim() case token.COLUMNS: - return p.parseColumnsMatcher() + // COLUMNS() is a column matcher, but 'columns' alone is an identifier (e.g., table name) + if p.peekIs(token.LPAREN) { + return p.parseColumnsMatcher() + } + return p.parseKeywordAsIdentifier() case token.ARRAY: // array(1,2,3) constructor or array as identifier (column name) if p.peekIs(token.LPAREN) { @@ -662,12 +710,19 @@ func (p *Parser) parseFunctionCall(name string, pos token.Position) *ast.Functio p.nextToken() // skip ( - // Handle DISTINCT - if p.currentIs(token.DISTINCT) { + // Handle DISTINCT modifier (but not if DISTINCT is being used as a column name) + // If DISTINCT is followed by ) or , then it's a column reference, not a modifier + if p.currentIs(token.DISTINCT) && !p.peekIs(token.RPAREN) && !p.peekIs(token.COMMA) { fn.Distinct = true p.nextToken() } + // Handle ALL modifier (but not if ALL is being used as a column name) + // If ALL is followed by ) or , then it's a column 
reference, not a modifier + if p.currentIs(token.ALL) && !p.peekIs(token.RPAREN) && !p.peekIs(token.COMMA) { + p.nextToken() + } + // Handle view() and similar functions that take a subquery as argument // view(SELECT ...) should parse SELECT as a subquery if strings.ToLower(name) == "view" && (p.currentIs(token.SELECT) || p.currentIs(token.WITH)) { @@ -707,9 +762,8 @@ func (p *Parser) parseFunctionCall(name string, pos token.Position) *ast.Functio p.nextToken() // skip ( if p.currentIs(token.WHERE) { p.nextToken() // skip WHERE - // Parse the filter condition - just consume it for now - // The filter is essentially a where clause for the aggregate - p.parseExpression(LOWEST) + // Parse the filter condition and store it + fn.Filter = p.parseExpression(LOWEST) } p.expect(token.RPAREN) } @@ -743,7 +797,7 @@ func (p *Parser) parseWindowSpec() *ast.WindowSpec { return spec } - // Check for named window reference inside parentheses: OVER (w0) + // Check for named window reference inside parentheses: OVER (w0) or OVER (w0 ORDER BY ...) // This happens when the identifier is not a known clause keyword if p.currentIs(token.IDENT) { upper := strings.ToUpper(p.current.Value) @@ -751,8 +805,8 @@ func (p *Parser) parseWindowSpec() *ast.WindowSpec { if upper != "PARTITION" && upper != "ORDER" && upper != "ROWS" && upper != "RANGE" && upper != "GROUPS" { spec.Name = p.current.Value p.nextToken() - p.expect(token.RPAREN) - return spec + // Don't return early - there may be more clauses after the window name + // e.g., OVER (w1 ROWS UNBOUNDED PRECEDING) } } @@ -920,8 +974,10 @@ func (p *Parser) parseNumber() ast.Expression { // Try unsigned uint64 for large positive numbers u, uerr := strconv.ParseUint(value, base, 64) if uerr != nil { + // Too large for int64/uint64, store as string with IsBigInt flag lit.Type = ast.LiteralString lit.Value = value + lit.IsBigInt = true } else { lit.Type = ast.LiteralInteger lit.Value = u // Store as uint64 @@ -1104,8 +1160,9 @@ func (p *Parser) parseGroupedOrTuple() ast.Expression { Query: subquery, } } - // EXPLAIN as subquery - if p.currentIs(token.EXPLAIN) { + // EXPLAIN as subquery - but only if followed by tokens that make sense for EXPLAIN + // (not when EXPLAIN is used as an identifier, e.g., "explain LIKE ...") + if p.currentIs(token.EXPLAIN) && p.isExplainFollowedByStatement() { explain := p.parseExplain() p.expect(token.RPAREN) return &ast.Subquery{ @@ -1157,6 +1214,12 @@ func (p *Parser) parseGroupedOrTuple() ast.Expression { ident.Parenthesized = true } + // Mark literals as parenthesized so -(1) outputs as negate function + // instead of being folded into a negative literal + if lit, ok := first.(*ast.Literal); ok { + lit.Parenthesized = true + } + return first } @@ -1165,13 +1228,45 @@ func (p *Parser) parseArrayLiteral() ast.Expression { Position: p.current.Pos, Type: ast.LiteralArray, } + bracketPos := p.current.Pos.Offset p.nextToken() // skip [ + // Check if there's whitespace/newline after the opening bracket + // A bracket is 1 byte, so if offset difference > 1, there's whitespace + spacedBrackets := p.current.Pos.Offset > bracketPos+1 + var elements []ast.Expression - if !p.currentIs(token.RBRACKET) { - elements = p.parseExpressionList() + spacedCommas := false + + if !p.currentIs(token.RBRACKET) && !p.currentIs(token.EOF) { + // Parse first element + expr := p.parseExpression(LOWEST) + if expr != nil { + expr = p.parseImplicitAlias(expr) + elements = append(elements, expr) + } + + for p.currentIs(token.COMMA) { + commaPos := p.current.Pos.Offset + 
p.nextToken() // skip comma + // Check if there's whitespace between comma and next token + // A comma is 1 byte, so if offset difference > 1, there's whitespace + if p.current.Pos.Offset > commaPos+1 { + spacedCommas = true + } + if p.currentIs(token.RBRACKET) { + break // Handle trailing comma + } + expr := p.parseExpression(LOWEST) + if expr != nil { + expr = p.parseImplicitAlias(expr) + elements = append(elements, expr) + } + } } lit.Value = elements + lit.SpacedCommas = spacedCommas + lit.SpacedBrackets = spacedBrackets p.expect(token.RBRACKET) return lit @@ -1529,17 +1624,25 @@ func (p *Parser) parseInterval() ast.Expression { expr.Value = p.parseExpression(ALIAS_PREC) // Handle INTERVAL '2' AS n minute - where AS n is alias on the value - if p.currentIs(token.AS) { - p.nextToken() // skip AS - if p.currentIs(token.IDENT) || p.current.Token.IsKeyword() { + // Only consume AS if it's followed by an identifier AND that identifier is followed by an interval unit + // This distinguishes "INTERVAL '2' AS n minute" from "INTERVAL '1 MONTH 1 DAY' AS e4" + if p.currentIs(token.AS) && (p.peekIs(token.IDENT) || p.peek.Token.IsKeyword()) { + // Look ahead to check if the identifier after alias is an interval unit + // If so, consume the alias; otherwise leave AS for the outer context + if isIntervalUnit(p.peek.Value) { + // AS is followed by unit (e.g., "AS minute") - don't consume + } else if p.peekPeekIsIntervalUnit() { + // AS alias unit pattern - consume the alias + p.nextToken() // skip AS alias := p.current.Value p.nextToken() expr.Value = p.wrapWithAlias(expr.Value, alias) } + // Otherwise, leave AS for outer context (e.g., WITH ... AS e4) } // Parse unit (interval units are identifiers like DAY, MONTH, etc.) - if p.currentIs(token.IDENT) { + if p.currentIs(token.IDENT) && isIntervalUnit(p.current.Value) { expr.Unit = strings.ToUpper(p.current.Value) p.nextToken() } @@ -2121,27 +2224,36 @@ func (p *Parser) parseArrayAccess(left ast.Expression) ast.Expression { return expr } -// parseTupleAccessFromNumber handles tuple access like t.1 where .1 was lexed as a single NUMBER token +// parseTupleAccessFromNumber handles tuple access like t.1 or t.1.2.3 where .1 or .1.2.3 was lexed as a single NUMBER token func (p *Parser) parseTupleAccessFromNumber(left ast.Expression) ast.Expression { - // The current value is like ".1" - extract the index part + // The current value is like ".1" or ".1.2" - extract the index parts indexStr := strings.TrimPrefix(p.current.Value, ".") pos := p.current.Pos p.nextToken() - idx, err := strconv.ParseInt(indexStr, 10, 64) - if err != nil { - return left - } + // Split by dots to handle chained access like .1.2.3 + parts := strings.Split(indexStr, ".") + result := left - return &ast.TupleAccess{ - Position: pos, - Tuple: left, - Index: &ast.Literal{ + for _, part := range parts { + idx, err := strconv.ParseInt(part, 10, 64) + if err != nil { + // If any part fails to parse as integer, return what we have so far + return result + } + + result = &ast.TupleAccess{ Position: pos, - Type: ast.LiteralInteger, - Value: idx, - }, + Tuple: result, + Index: &ast.Literal{ + Position: pos, + Type: ast.LiteralInteger, + Value: idx, + }, + } } + + return result } func (p *Parser) parseDotAccess(left ast.Expression) ast.Expression { @@ -2533,12 +2645,19 @@ func (p *Parser) parseKeywordAsFunction() ast.Expression { Name: name, } - // Handle DISTINCT - if p.currentIs(token.DISTINCT) { + // Handle DISTINCT modifier (but not if DISTINCT is being used as a column name) + // If 
DISTINCT is followed by ) or , then it's a column reference, not a modifier + if p.currentIs(token.DISTINCT) && !p.peekIs(token.RPAREN) && !p.peekIs(token.COMMA) { fn.Distinct = true p.nextToken() } + // Handle ALL modifier (but not if ALL is being used as a column name) + // If ALL is followed by ) or , then it's a column reference, not a modifier + if p.currentIs(token.ALL) && !p.peekIs(token.RPAREN) && !p.peekIs(token.COMMA) { + p.nextToken() + } + // Handle view() and similar functions that take a subquery as argument if name == "view" && (p.currentIs(token.SELECT) || p.currentIs(token.WITH)) { subquery := p.parseSelectWithUnion() @@ -2569,7 +2688,7 @@ func (p *Parser) parseKeywordAsFunction() ast.Expression { p.nextToken() // skip ( if p.currentIs(token.WHERE) { p.nextToken() // skip WHERE - p.parseExpression(LOWEST) + fn.Filter = p.parseExpression(LOWEST) } p.expect(token.RPAREN) } diff --git a/parser/parser.go b/parser/parser.go index 092dac9b27..0c3bae6c16 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -13,12 +13,33 @@ import ( "github.com/sqlc-dev/doubleclick/token" ) +// intervalUnits contains valid SQL interval unit names +var intervalUnits = map[string]bool{ + "YEAR": true, "YEARS": true, + "QUARTER": true, "QUARTERS": true, + "MONTH": true, "MONTHS": true, + "WEEK": true, "WEEKS": true, + "DAY": true, "DAYS": true, + "HOUR": true, "HOURS": true, + "MINUTE": true, "MINUTES": true, + "SECOND": true, "SECONDS": true, + "MILLISECOND": true, "MILLISECONDS": true, + "MICROSECOND": true, "MICROSECONDS": true, + "NANOSECOND": true, "NANOSECONDS": true, +} + +// isIntervalUnit checks if the given string is a valid interval unit name +func isIntervalUnit(s string) bool { + return intervalUnits[strings.ToUpper(s)] +} + // Parser parses ClickHouse SQL statements. type Parser struct { - lexer *lexer.Lexer - current lexer.Item - peek lexer.Item - errors []error + lexer *lexer.Lexer + current lexer.Item + peek lexer.Item + peekPeek lexer.Item // Third lookahead token for special cases + errors []error } // New creates a new Parser from an io.Reader. @@ -26,7 +47,8 @@ func New(r io.Reader) *Parser { p := &Parser{ lexer: lexer.New(r), } - // Read two tokens to initialize current and peek + // Read three tokens to initialize current, peek, and peekPeek + p.nextToken() p.nextToken() p.nextToken() return p @@ -34,10 +56,11 @@ func New(r io.Reader) *Parser { func (p *Parser) nextToken() { p.current = p.peek + p.peek = p.peekPeek for { - p.peek = p.lexer.NextToken() + p.peekPeek = p.lexer.NextToken() // Skip whitespace and comments - if p.peek.Token == token.WHITESPACE || p.peek.Token == token.LINE_COMMENT { + if p.peekPeek.Token == token.WHITESPACE || p.peekPeek.Token == token.LINE_COMMENT { continue } break @@ -52,6 +75,49 @@ func (p *Parser) peekIs(t token.Token) bool { return p.peek.Token == t } +func (p *Parser) peekPeekIs(t token.Token) bool { + return p.peekPeek.Token == t +} + +// isColumnsFunction checks if current token is COLUMNS function (for column expressions) +func (p *Parser) isColumnsFunction() bool { + return p.currentIs(token.COLUMNS) && p.peekIs(token.LPAREN) +} + +// peekPeekIsIntervalUnit checks if the third lookahead token is an interval unit +// This is used for distinguishing "INTERVAL '2' AS n minute" patterns +func (p *Parser) peekPeekIsIntervalUnit() bool { + return isIntervalUnit(p.peekPeek.Value) +} + +// isExplainFollowedByStatement checks if EXPLAIN is followed by tokens that indicate +// an EXPLAIN statement (SELECT, WITH, AST, SYNTAX, etc.) 
rather than being used as an identifier +func (p *Parser) isExplainFollowedByStatement() bool { + // EXPLAIN can be followed by: + // - SELECT, WITH (for EXPLAIN SELECT ...) + // - QUERY, AST, SYNTAX, PLAN, PIPELINE, ESTIMATE, TABLE, CURRENT (explain types) + // - Identifier for explain options like "header = 1" + // If followed by comparison operators (LIKE, =, etc.) or logical operators, it's being used as identifier + switch p.peek.Token { + case token.SELECT, token.WITH: + return true + case token.IDENT: + // Check if it's an EXPLAIN type or option + upperValue := strings.ToUpper(p.peek.Value) + switch upperValue { + case "QUERY", "AST", "SYNTAX", "PLAN", "PIPELINE", "ESTIMATE", "TABLE", "CURRENT": + return true + case "HEADER", "ACTIONS", "DESCRIPTION", "JSON", "GRAPH", "COMPACT", "INDEXES", "SORTING", "AGGREGATION": + // These are explain options + return true + } + return false + default: + // If followed by operators like LIKE, =, <, >, etc., it's being used as identifier + return false + } +} + func (p *Parser) expect(t token.Token) bool { if p.currentIs(t) { p.nextToken() @@ -142,7 +208,8 @@ func (p *Parser) parseStatement() ast.Statement { case token.SELECT: return p.parseSelectWithUnion() case token.WITH: - return p.parseSelectWithUnion() + // WITH can precede SELECT or INSERT in ClickHouse + return p.parseWithStatement() case token.FROM: // FROM ... SELECT syntax (ClickHouse extension) return p.parseFromSelectSyntax() @@ -181,6 +248,10 @@ func (p *Parser) parseStatement() ast.Statement { if p.peek.Token == token.IDENT && strings.ToUpper(p.peek.Value) == "WORKLOAD" { return p.parseDropWorkload() } + // Check for DROP NAMED COLLECTION + if p.peek.Token == token.IDENT && strings.ToUpper(p.peek.Value) == "NAMED" { + return p.parseDropNamedCollection() + } return p.parseDrop() case token.ALTER: // Check for ALTER USER @@ -203,6 +274,10 @@ func (p *Parser) parseStatement() ast.Statement { if p.peek.Token == token.IDENT && strings.ToUpper(p.peek.Value) == "ROLE" { return p.parseAlterRole() } + // Check for ALTER NAMED COLLECTION + if p.peek.Token == token.IDENT && strings.ToUpper(p.peek.Value) == "NAMED" { + return p.parseAlterNamedCollection() + } return p.parseAlter() case token.TRUNCATE: return p.parseTruncate() @@ -265,6 +340,234 @@ func (p *Parser) parseStatement() ast.Statement { } } +// parseWithStatement parses WITH ... (SELECT|INSERT) statements +// WITH clause can precede both SELECT and INSERT in ClickHouse +func (p *Parser) parseWithStatement() ast.Statement { + // Save position to check for WITH ... INSERT later + pos := p.current.Pos + + // Peek ahead to see if this is WITH ... INSERT + // We need to parse the WITH clause first to check what follows + p.nextToken() // skip WITH + + // Skip RECURSIVE keyword if present + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "RECURSIVE" { + p.nextToken() + } + + // Parse the WITH clause + with := p.parseWithClause() + + // Now check what follows: INSERT or SELECT + if p.currentIs(token.INSERT) { + // WITH ... INSERT ... 
SELECT syntax + ins := p.parseInsert() + if ins != nil { + // Store the WITH clause in InsertQuery.With for explain to handle + // Don't propagate to SelectQuery.With - the explain code will output + // the inherited WITH at the end of each SelectQuery's children + ins.With = with + } + return ins + } + + // For SELECT, we use parseSelectWithParsedWith to continue with normal parsing + // but with the already-parsed WITH clause + return p.parseSelectWithUnionWithParsedWith(pos, with) +} + +// parseSelectWithUnionWithParsedWith parses a SELECT with an already-parsed WITH clause +func (p *Parser) parseSelectWithUnionWithParsedWith(pos token.Position, with []ast.Expression) *ast.SelectWithUnionQuery { + query := &ast.SelectWithUnionQuery{ + Position: pos, + } + + // Parse first select with the pre-parsed WITH clause + sel := p.parseSelectWithParsedWith(with) + if sel == nil { + return nil + } + + // Check for INTERSECT/EXCEPT + if p.isIntersectExceptWithWrapper() { + stmts := []ast.Statement{sel} + var ops []string + + for p.isIntersectExceptWithWrapper() { + var op string + if p.currentIs(token.EXCEPT) { + op = "EXCEPT" + } else { + op = "INTERSECT" + } + p.nextToken() + + if p.currentIs(token.ALL) { + op += " ALL" + p.nextToken() + } else if p.currentIs(token.DISTINCT) { + op += " DISTINCT" + p.nextToken() + } + ops = append(ops, op) + + var nextStmt ast.Statement + if p.currentIs(token.LPAREN) { + p.nextToken() + nested := p.parseSelectWithUnion() + if nested == nil { + break + } + p.expect(token.RPAREN) + nextStmt = nested + } else { + nextSel := p.parseSelect() + if nextSel == nil { + break + } + nextStmt = nextSel + } + stmts = append(stmts, nextStmt) + } + + result := buildIntersectExceptTree(stmts, ops) + query.Selects = append(query.Selects, result) + + // Handle UNION after INTERSECT/EXCEPT + for p.currentIs(token.UNION) { + p.nextToken() + mode := "ALL" + if p.currentIs(token.ALL) { + p.nextToken() + } else if p.currentIs(token.DISTINCT) { + mode = "DISTINCT" + p.nextToken() + } + query.UnionModes = append(query.UnionModes, mode) + + var nextStmt ast.Statement + if p.currentIs(token.LPAREN) { + p.nextToken() + nested := p.parseSelectWithUnion() + if nested == nil { + break + } + p.expect(token.RPAREN) + nextStmt = nested + } else { + nextSel := p.parseSelect() + if nextSel == nil { + break + } + nextStmt = nextSel + } + query.Selects = append(query.Selects, nextStmt) + } + + // Parse union-level SETTINGS and FORMAT + var formatParsed bool + for p.currentIs(token.SETTINGS) || p.currentIs(token.FORMAT) { + if p.currentIs(token.SETTINGS) { + p.nextToken() + settings := p.parseSettingsList() + query.Settings = settings + if formatParsed { + query.SettingsAfterFormat = true + } else { + query.SettingsBeforeFormat = true + } + } else if p.currentIs(token.FORMAT) { + p.nextToken() + formatParsed = true + if len(query.Selects) > 0 { + if sq, ok := query.Selects[0].(*ast.SelectQuery); ok { + if p.currentIs(token.NULL) { + sq.Format = &ast.Identifier{Position: p.current.Pos, Parts: []string{"Null"}} + p.nextToken() + } else if p.currentIs(token.IDENT) || p.current.Token.IsKeyword() { + sq.Format = &ast.Identifier{Position: p.current.Pos, Parts: []string{p.current.Value}} + p.nextToken() + } + } + } + } + } + + return query + } + + query.Selects = append(query.Selects, sel) + + // Handle UNION + for p.currentIs(token.UNION) { + p.nextToken() + mode := "ALL" + if p.currentIs(token.ALL) { + mode = "ALL" + p.nextToken() + } else if p.currentIs(token.DISTINCT) { + mode = "DISTINCT" + 
p.nextToken() + } + query.UnionModes = append(query.UnionModes, mode) + + var nextStmt ast.Statement + if p.currentIs(token.LPAREN) { + p.nextToken() + nested := p.parseSelectWithUnion() + if nested == nil { + break + } + p.expect(token.RPAREN) + nextStmt = nested + } else { + nextSelect := p.parseSelect() + if nextSelect == nil { + break + } + nextStmt = nextSelect + } + query.Selects = append(query.Selects, nextStmt) + } + + // Parse union-level SETTINGS and FORMAT + var formatParsed bool + for p.currentIs(token.SETTINGS) || p.currentIs(token.FORMAT) { + if p.currentIs(token.SETTINGS) { + p.nextToken() + settings := p.parseSettingsList() + query.Settings = settings + if formatParsed { + query.SettingsAfterFormat = true + } else { + query.SettingsBeforeFormat = true + } + } else if p.currentIs(token.FORMAT) { + p.nextToken() + formatParsed = true + if len(query.Selects) > 0 { + if sq, ok := query.Selects[0].(*ast.SelectQuery); ok { + if p.currentIs(token.NULL) { + sq.Format = &ast.Identifier{Position: p.current.Pos, Parts: []string{"Null"}} + p.nextToken() + } else if p.currentIs(token.IDENT) || p.current.Token.IsKeyword() { + sq.Format = &ast.Identifier{Position: p.current.Pos, Parts: []string{p.current.Value}} + p.nextToken() + } + } + } + } + } + + return query +} + +// parseSelectWithParsedWith parses a SELECT statement with an already-parsed WITH clause +func (p *Parser) parseSelectWithParsedWith(with []ast.Expression) *ast.SelectQuery { + // Use the internal helper that does the actual parsing + return p.parseSelectInternal(with) +} + // parseSelectWithUnion parses SELECT ... UNION/INTERSECT/EXCEPT ... queries func (p *Parser) parseSelectWithUnion() *ast.SelectWithUnionQuery { query := &ast.SelectWithUnionQuery{ @@ -718,12 +1021,18 @@ func buildIntersectExceptTree(stmts []ast.Statement, ops []string) ast.Statement } func (p *Parser) parseSelect() *ast.SelectQuery { + return p.parseSelectInternal(nil) +} + +// parseSelectInternal parses a SELECT query with an optional pre-parsed WITH clause +func (p *Parser) parseSelectInternal(preParsedWith []ast.Expression) *ast.SelectQuery { sel := &ast.SelectQuery{ Position: p.current.Pos, + With: preParsedWith, } - // Handle WITH clause - if p.currentIs(token.WITH) { + // Handle WITH clause only if not pre-parsed + if preParsedWith == nil && p.currentIs(token.WITH) { p.nextToken() // Skip RECURSIVE keyword if present if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "RECURSIVE" { @@ -732,7 +1041,16 @@ func (p *Parser) parseSelect() *ast.SelectQuery { sel.With = p.parseWithClause() } - if !p.expect(token.SELECT) { + // Handle FROM ... SELECT syntax (ClickHouse extension) + // This can come after WITH clause: WITH 1 as n FROM ... SELECT ... + if p.currentIs(token.FROM) { + p.nextToken() // skip FROM + sel.From = p.parseTablesInSelect() + // Now expect SELECT + if !p.expect(token.SELECT) { + return nil + } + } else if !p.expect(token.SELECT) { return nil } @@ -892,6 +1210,19 @@ func (p *Parser) parseSelect() *ast.SelectQuery { // LIMIT BY clause (ClickHouse specific: LIMIT n BY expr1, expr2, ...) if p.currentIs(token.BY) { p.nextToken() + // If we had comma syntax (LIMIT offset, count BY ...), save values for LIMIT BY + // Otherwise just LIMIT n BY ... uses n as the count + if sel.Offset != nil { + // LIMIT offset, count BY ... -> LimitByOffset=offset, LimitByLimit=count + sel.LimitByOffset = sel.Offset + sel.LimitByLimit = sel.Limit + sel.Offset = nil + sel.Limit = nil + } else { + // LIMIT n BY ... 
-> LimitByLimit=n + sel.LimitByLimit = sel.Limit + sel.Limit = nil + } // Parse LIMIT BY expressions for !p.isEndOfExpression() { expr := p.parseExpression(LOWEST) @@ -905,8 +1236,6 @@ func (p *Parser) parseSelect() *ast.SelectQuery { // After LIMIT BY, there can be another LIMIT for overall output if p.currentIs(token.LIMIT) { p.nextToken() - // Save the LIMIT BY limit value (e.g., LIMIT 1 BY x -> LimitByLimit=1) - sel.LimitByLimit = sel.Limit sel.Limit = p.parseExpression(LOWEST) sel.LimitByHasLimit = true } @@ -930,6 +1259,11 @@ func (p *Parser) parseSelect() *ast.SelectQuery { // LIMIT n OFFSET m BY expr syntax - handle BY after OFFSET if p.currentIs(token.BY) && sel.Limit != nil && len(sel.LimitBy) == 0 { p.nextToken() + // Move Limit and Offset to LimitByLimit and LimitByOffset + sel.LimitByLimit = sel.Limit + sel.LimitByOffset = sel.Offset + sel.Limit = nil + sel.Offset = nil // Parse LIMIT BY expressions for !p.isEndOfExpression() { expr := p.parseExpression(LOWEST) @@ -1334,6 +1668,10 @@ func (p *Parser) parseTableExpression() *ast.TableExpression { // SELECT, WITH, or nested (SELECT...) for UNION queries like ((SELECT 1) UNION ALL SELECT 2) subquery := p.parseSelectWithUnion() expr.Table = &ast.Subquery{Query: subquery} + } else if p.currentIs(token.FROM) { + // FROM ... SELECT (ClickHouse extension) - e.g., FROM (FROM numbers(1) SELECT *) + subquery := p.parseFromSelectSyntax() + expr.Table = &ast.Subquery{Query: subquery} } else if p.currentIs(token.EXPLAIN) { // EXPLAIN as subquery in FROM clause explain := p.parseExplain() @@ -1646,11 +1984,24 @@ func (p *Parser) parseInsert() *ast.InsertQuery { // Parse column list if p.currentIs(token.LPAREN) { p.nextToken() - // Check for (*) meaning all columns - if p.currentIs(token.ASTERISK) { - ins.AllColumns = true - p.nextToken() + // Check for special column expressions (*, table.*, COLUMNS(...), with EXCEPT/APPLY/REPLACE) + if p.currentIs(token.ASTERISK) || p.isColumnsFunction() || + (p.currentIs(token.IDENT) && p.peekIs(token.DOT) && p.peekPeekIs(token.ASTERISK)) { + // Parse as expression to handle EXCEPT/APPLY/REPLACE transformers + expr := p.parseExpression(LOWEST) + if expr != nil { + ins.ColumnExpressions = append(ins.ColumnExpressions, expr) + } + // Handle comma-separated expressions + for p.currentIs(token.COMMA) { + p.nextToken() + expr = p.parseExpression(LOWEST) + if expr != nil { + ins.ColumnExpressions = append(ins.ColumnExpressions, expr) + } + } } else { + // Regular column names for !p.currentIs(token.RPAREN) && !p.currentIs(token.EOF) { pos := p.current.Pos colName := p.parseIdentifierName() @@ -1859,12 +2210,7 @@ func (p *Parser) parseCreate() ast.Statement { p.parseCreateDictionary(create) case "NAMED": // CREATE NAMED COLLECTION name AS key=value, ... - p.nextToken() // skip NAMED - // Skip "COLLECTION" if present - if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "COLLECTION" { - p.nextToken() - } - p.parseCreateGeneric(create) + return p.parseCreateNamedCollection(pos) case "PROFILE": // CREATE PROFILE (without SETTINGS keyword) return p.parseCreateSettingsProfile(pos) @@ -2066,6 +2412,15 @@ func (p *Parser) parseCreateTable(create *ast.CreateQuery) { } } + // Handle UUID clause (CREATE TABLE name UUID 'uuid-value' ...) 
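Illustrative sketch (not part of the patch; plain Go stand-ins for the SelectQuery limit fields) of the LIMIT ... BY reshuffling earlier in this hunk, where an already-parsed LIMIT offset, count pair moves into the LIMIT BY slots once BY is seen:

package main

import "fmt"

// limitState stands in for the SelectQuery limit fields touched above.
type limitState struct {
	Limit, Offset, LimitByLimit, LimitByOffset any
	LimitByHasLimit                            bool
}

// onBy mirrors the reshuffling performed when BY follows LIMIT: values already
// stored in Limit/Offset move to the LIMIT BY slots, and the outer LIMIT slots
// are cleared for a possible trailing LIMIT (e.g. LIMIT 1 BY x LIMIT 3).
func (s *limitState) onBy() {
	if s.Offset != nil {
		s.LimitByOffset, s.LimitByLimit = s.Offset, s.Limit
	} else {
		s.LimitByLimit = s.Limit
	}
	s.Offset, s.Limit = nil, nil
}

func main() {
	s := limitState{Limit: 3, Offset: 2} // LIMIT 2, 3 BY x
	s.onBy()
	fmt.Println(s.LimitByOffset, s.LimitByLimit, s.Limit) // 2 3 <nil>
}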
+ // The UUID is not shown in EXPLAIN AST output, but we need to skip it + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "UUID" { + p.nextToken() // skip UUID + if p.currentIs(token.STRING) { + p.nextToken() // skip the UUID value + } + } + // Parse column definitions and indexes if p.currentIs(token.LPAREN) { p.nextToken() @@ -2222,17 +2577,19 @@ func (p *Parser) parseTableOptions(create *ast.CreateQuery) { if p.currentIs(token.LPAREN) { pos := p.current.Pos p.nextToken() - exprs := p.parseExpressionList() + exprs, hasModifier := p.parseCreateOrderByExpressions() p.expect(token.RPAREN) - // Store tuple literal for ORDER BY (expr1, expr2, ...) or ORDER BY () - if len(exprs) == 0 || len(exprs) > 1 { + // Track if any ASC/DESC modifiers were present + create.OrderByHasModifiers = hasModifier + // Store tuple literal for ORDER BY with multiple exprs, empty tuple, or any with ASC/DESC modifiers + if len(exprs) == 0 || len(exprs) > 1 || hasModifier { create.OrderBy = []ast.Expression{&ast.Literal{ Position: pos, Type: ast.LiteralTuple, Value: exprs, }} } else { - // Single expression in parentheses - just extract it + // Single expression in parentheses without modifiers - just extract it create.OrderBy = exprs } } else { @@ -2276,6 +2633,16 @@ func (p *Parser) parseTableOptions(create *ast.CreateQuery) { Position: p.current.Pos, Expression: p.parseExpression(ALIAS_PREC), // Use ALIAS_PREC for AS SELECT } + // Skip RECOMPRESS CODEC(...) if present + p.skipTTLModifiers() + // Parse additional TTL elements (comma-separated) + for p.currentIs(token.COMMA) { + p.nextToken() // skip comma + expr := p.parseExpression(ALIAS_PREC) + create.TTL.Expressions = append(create.TTL.Expressions, expr) + // Skip RECOMPRESS CODEC(...) if present + p.skipTTLModifiers() + } // Handle TTL GROUP BY x SET y = max(y) syntax if p.currentIs(token.GROUP) { p.nextToken() @@ -2387,12 +2754,38 @@ func (p *Parser) parseCreateView(create *ast.CreateQuery) { } // Parse column definitions (e.g., CREATE VIEW v (x UInt64) AS SELECT ...) + // For MATERIALIZED VIEW, this can also include INDEX, PROJECTION, and PRIMARY KEY if p.currentIs(token.LPAREN) { p.nextToken() for !p.currentIs(token.RPAREN) && !p.currentIs(token.EOF) { - col := p.parseColumnDeclaration() - if col != nil { - create.Columns = append(create.Columns, col) + // Handle INDEX definition + if p.currentIs(token.INDEX) { + idx := p.parseIndexDefinition() + if idx != nil { + create.Indexes = append(create.Indexes, idx) + } + } else if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "PROJECTION" { + // Parse PROJECTION definitions: PROJECTION name (SELECT ...) 
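Illustrative sketch (simplified string tokens, not the real lexer) of the ASC/DESC detection parseCreateOrderByExpressions performs for CREATE ... ORDER BY; when any modifier is present the caller keeps the whole list as one tuple literal and sets OrderByHasModifiers:

package main

import (
	"fmt"
	"strings"
)

// collectOrderBy walks a simplified ORDER BY token list and returns the
// expression tokens plus whether an ASC/DESC modifier was present, mirroring
// the hasModifier bookkeeping in parseCreateOrderByExpressions.
func collectOrderBy(tokens []string) (exprs []string, hasModifier bool) {
	for _, t := range tokens {
		switch strings.ToUpper(t) {
		case "ASC", "DESC":
			hasModifier = true
		case ",":
			// expression separator
		default:
			exprs = append(exprs, t)
		}
	}
	return exprs, hasModifier
}

func main() {
	exprs, mod := collectOrderBy([]string{"a", "ASC", ",", "b", "DESC"})
	fmt.Println(exprs, mod) // [a b] true -> emitted as one tuple in EXPLAIN AST
}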
+ p.nextToken() // skip PROJECTION + proj := p.parseProjection() + if proj != nil { + create.Projections = append(create.Projections, proj) + } + } else if p.currentIs(token.PRIMARY) { + // PRIMARY KEY in column list + p.nextToken() // skip PRIMARY + if p.currentIs(token.KEY) { + p.nextToken() // skip KEY + expr := p.parseExpression(LOWEST) + if expr != nil { + create.ColumnsPrimaryKey = append(create.ColumnsPrimaryKey, expr) + } + } + } else { + col := p.parseColumnDeclaration() + if col != nil { + create.Columns = append(create.Columns, col) + } } if p.currentIs(token.COMMA) { p.nextToken() @@ -2541,8 +2934,11 @@ func (p *Parser) parseCreateUser(create *ast.CreateQuery) { if p.currentIs(token.WITH) { p.nextToken() } + // Check for ssh_key authentication method + isSSHKey := p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "SSH_KEY" // Skip auth method name (plaintext_password, sha256_password, etc.) // Stop at BY (token), comma, or next section keywords + gotAuthValue := false for p.currentIs(token.IDENT) { ident := strings.ToUpper(p.current.Value) // Stop at HOST, SETTINGS, DEFAULT, GRANTEES - don't consume these @@ -2550,19 +2946,48 @@ func (p *Parser) parseCreateUser(create *ast.CreateQuery) { break } p.nextToken() - // Handle REALM/SERVER string values (for kerberos/ldap) + // Handle REALM/SERVER string values (for kerberos/ldap) - capture them! if p.currentIs(token.STRING) && (ident == "REALM" || ident == "SERVER") { + create.AuthenticationValues = append(create.AuthenticationValues, p.current.Value) + gotAuthValue = true p.nextToken() } } - // Check for BY 'value' (BY is a keyword token, not IDENT) + // Check for BY 'value' or BY KEY ... TYPE ... (SSH key auth) if p.currentIs(token.BY) { p.nextToken() - if p.currentIs(token.STRING) { + if isSSHKey { + // Parse SSH key format: BY KEY 'key' TYPE 'type' [, KEY 'key' TYPE 'type' ...] + for { + if p.currentIs(token.KEY) { + p.nextToken() + if p.currentIs(token.STRING) { + p.nextToken() // skip key value + } + // Skip TYPE 'algorithm' + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "TYPE" { + p.nextToken() + if p.currentIs(token.STRING) { + p.nextToken() // skip type value + } + } + create.SSHKeyCount++ + } + // Check for comma (multiple keys) + if p.currentIs(token.COMMA) { + p.nextToken() + continue + } + break + } + gotAuthValue = true + } else if p.currentIs(token.STRING) { create.AuthenticationValues = append(create.AuthenticationValues, p.current.Value) + gotAuthValue = true p.nextToken() } } + _ = gotAuthValue // suppress unused variable warning if any // Check for comma (multiple auth methods) if p.currentIs(token.COMMA) { p.nextToken() @@ -2617,7 +3042,9 @@ func (p *Parser) parseAlterUser() *ast.CreateQuery { break } p.nextToken() + // Handle REALM/SERVER string values (for kerberos/ldap) - capture them! 
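Illustrative sketch (hypothetical helper over plain string tokens) of the KEY '<key>' TYPE '<alg>' groups the ssh_key branch above walks while incrementing SSHKeyCount; the key material itself is skipped, not stored:

package main

import (
	"fmt"
	"strings"
)

// countSSHKeys consumes KEY '<key>' [TYPE '<alg>'] groups separated by commas
// and returns how many keys were seen, mirroring create.SSHKeyCount above.
func countSSHKeys(tokens []string) int {
	count := 0
	for i := 0; i < len(tokens); i++ {
		if strings.EqualFold(tokens[i], "KEY") {
			count++
			i++ // skip the key literal
			if i+1 < len(tokens) && strings.EqualFold(tokens[i+1], "TYPE") {
				i += 2 // skip TYPE and the algorithm literal
			}
		}
	}
	return count
}

func main() {
	// ... IDENTIFIED WITH ssh_key BY KEY 'k1' TYPE 'ssh-rsa', KEY 'k2' TYPE 'ssh-ed25519'
	toks := []string{"KEY", "'k1'", "TYPE", "'ssh-rsa'", ",", "KEY", "'k2'", "TYPE", "'ssh-ed25519'"}
	fmt.Println(countSSHKeys(toks)) // 2
}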
if p.currentIs(token.STRING) && (ident == "REALM" || ident == "SERVER") { + create.AuthenticationValues = append(create.AuthenticationValues, p.current.Value) p.nextToken() } } @@ -3049,6 +3476,108 @@ func (p *Parser) parseCreateQuota(pos token.Position) *ast.CreateQuotaQuery { return query } +func (p *Parser) parseCreateNamedCollection(pos token.Position) *ast.CreateNamedCollectionQuery { + query := &ast.CreateNamedCollectionQuery{ + Position: pos, + } + + // Skip NAMED keyword + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "NAMED" { + p.nextToken() + } + + // Skip COLLECTION keyword + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "COLLECTION" { + p.nextToken() + } + + // Parse collection name + if p.currentIs(token.IDENT) || p.current.Token.IsKeyword() || p.currentIs(token.STRING) { + query.Name = p.current.Value + p.nextToken() + } + + // Skip the rest of the statement (AS key=value, ...) + for !p.currentIs(token.EOF) && !p.currentIs(token.SEMICOLON) { + p.nextToken() + } + + return query +} + +func (p *Parser) parseAlterNamedCollection() *ast.AlterNamedCollectionQuery { + pos := p.current.Pos + p.nextToken() // skip ALTER + + query := &ast.AlterNamedCollectionQuery{ + Position: pos, + } + + // Skip NAMED keyword + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "NAMED" { + p.nextToken() + } + + // Skip COLLECTION keyword + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "COLLECTION" { + p.nextToken() + } + + // Parse collection name + if p.currentIs(token.IDENT) || p.current.Token.IsKeyword() || p.currentIs(token.STRING) { + query.Name = p.current.Value + p.nextToken() + } + + // Skip the rest of the statement (DELETE key, SET key=value, ...) + for !p.currentIs(token.EOF) && !p.currentIs(token.SEMICOLON) { + p.nextToken() + } + + return query +} + +func (p *Parser) parseDropNamedCollection() *ast.DropNamedCollectionQuery { + pos := p.current.Pos + p.nextToken() // skip DROP + + query := &ast.DropNamedCollectionQuery{ + Position: pos, + } + + // Skip NAMED keyword + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "NAMED" { + p.nextToken() + } + + // Skip COLLECTION keyword + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "COLLECTION" { + p.nextToken() + } + + // Handle IF EXISTS + if p.currentIs(token.IF) { + p.nextToken() + if p.currentIs(token.EXISTS) { + query.IfExists = true + p.nextToken() + } + } + + // Parse collection name + if p.currentIs(token.IDENT) || p.current.Token.IsKeyword() || p.currentIs(token.STRING) { + query.Name = p.current.Value + p.nextToken() + } + + // Skip the rest of the statement + for !p.currentIs(token.EOF) && !p.currentIs(token.SEMICOLON) { + p.nextToken() + } + + return query +} + func (p *Parser) parseShowCreateRole(pos token.Position) *ast.ShowCreateRoleQuery { query := &ast.ShowCreateRoleQuery{ Position: pos, @@ -3403,10 +3932,17 @@ func (p *Parser) parseDictionaryPrimaryKey() []ast.Expression { p.nextToken() // skip ) } } else { - // Single identifier - expr := p.parseExpression(LOWEST) - if expr != nil { - keys = append(keys, expr) + // Can be comma-separated identifiers: PRIMARY KEY id, id_key + for { + expr := p.parseExpression(LOWEST) + if expr != nil { + keys = append(keys, expr) + } + if p.currentIs(token.COMMA) { + p.nextToken() + } else { + break + } } } @@ -3749,7 +4285,7 @@ func (p *Parser) parseColumnDeclaration() *ast.ColumnDeclaration { } // Parse COMMENT - if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) 
== "COMMENT" { + if p.currentIs(token.COMMENT) { p.nextToken() if p.currentIs(token.STRING) { col.Comment = p.current.Value @@ -3967,8 +4503,8 @@ func (p *Parser) isDataTypeName(name string) bool { types := []string{ "INT", "INT8", "INT16", "INT32", "INT64", "INT128", "INT256", "UINT8", "UINT16", "UINT32", "UINT64", "UINT128", "UINT256", - "FLOAT32", "FLOAT64", "FLOAT", "BFLOAT16", - "DECIMAL", "DECIMAL32", "DECIMAL64", "DECIMAL128", "DECIMAL256", + "FLOAT32", "FLOAT64", "FLOAT", "DOUBLE", "BFLOAT16", + "DECIMAL", "DECIMAL32", "DECIMAL64", "DECIMAL128", "DECIMAL256", "DEC", "STRING", "FIXEDSTRING", "UUID", "DATE", "DATE32", "DATETIME", "DATETIME64", "ENUM", "ENUM8", "ENUM16", @@ -3982,6 +4518,7 @@ func (p *Parser) isDataTypeName(name string) bool { "POINT", "RING", "POLYGON", "MULTIPOLYGON", "TIME64", "TIME", "DYNAMIC", + "QBIT", } for _, t := range types { if upper == t { @@ -4001,7 +4538,8 @@ func (p *Parser) parseCodecExpr() *ast.CodecExpr { } for !p.currentIs(token.RPAREN) && !p.currentIs(token.EOF) { - if p.currentIs(token.IDENT) { + // Accept IDENT or keywords as codec names (e.g., "Default" is a keyword) + if p.currentIs(token.IDENT) || p.current.Token.IsKeyword() { name := p.current.Value pos := p.current.Pos p.nextToken() @@ -4142,7 +4680,9 @@ func (p *Parser) parseEngineClause() *ast.EngineClause { engine.HasParentheses = true p.nextToken() if !p.currentIs(token.RPAREN) { - engine.Parameters = p.parseExpressionList() + // Engine parameters should not parse implicit aliases + // e.g., Distributed('cluster', database, table) - table is NOT an alias for database + engine.Parameters = p.parseEngineParameters() } p.expect(token.RPAREN) } @@ -4150,6 +4690,31 @@ func (p *Parser) parseEngineClause() *ast.EngineClause { return engine } +// parseEngineParameters parses comma-separated expressions for engine clauses +// without treating identifiers as implicit aliases +func (p *Parser) parseEngineParameters() []ast.Expression { + var exprs []ast.Expression + + if p.currentIs(token.RPAREN) || p.currentIs(token.EOF) { + return exprs + } + + expr := p.parseExpression(LOWEST) + if expr != nil { + exprs = append(exprs, expr) + } + + for p.currentIs(token.COMMA) { + p.nextToken() + expr := p.parseExpression(LOWEST) + if expr != nil { + exprs = append(exprs, expr) + } + } + + return exprs +} + func (p *Parser) parseDrop() *ast.DropQuery { drop := &ast.DropQuery{ Position: p.current.Pos, @@ -4234,12 +4799,15 @@ func (p *Parser) parseDrop() *ast.DropQuery { } } - // Handle IF EXISTS + // Handle IF EXISTS or IF EMPTY if p.currentIs(token.IF) { p.nextToken() if p.currentIs(token.EXISTS) { drop.IfExists = true p.nextToken() + } else if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "EMPTY" { + // IF EMPTY - skip the EMPTY keyword + p.nextToken() } } @@ -4433,6 +5001,12 @@ func (p *Parser) parseDrop() *ast.DropQuery { } } + // Handle SYNC (can appear before or after FORMAT) + if p.currentIs(token.SYNC) { + drop.Sync = true + p.nextToken() + } + // Handle FORMAT clause (for things like DROP TABLE ... 
FORMAT Null) if p.currentIs(token.FORMAT) { p.nextToken() @@ -4446,7 +5020,7 @@ func (p *Parser) parseDrop() *ast.DropQuery { } } - // Handle SYNC + // Handle SYNC again (can also appear after FORMAT) if p.currentIs(token.SYNC) { drop.Sync = true p.nextToken() @@ -4460,6 +5034,12 @@ func (p *Parser) parseDrop() *ast.DropQuery { } } + // Handle SETTINGS clause + if p.currentIs(token.SETTINGS) { + p.nextToken() // skip SETTINGS + drop.Settings = p.parseSettingsList() + } + return drop } @@ -4599,12 +5179,16 @@ func (p *Parser) parseAlterCommand() *ast.AlterCommand { Position: p.current.Pos, Name: idxName, } - // Parse expression in parentheses + // Parse expression - can be in parentheses or bare expression until TYPE keyword if p.currentIs(token.LPAREN) { p.nextToken() idx.Expression = p.parseExpression(LOWEST) cmd.IndexExpr = idx.Expression p.expect(token.RPAREN) + } else if !p.currentIs(token.IDENT) || strings.ToUpper(p.current.Value) != "TYPE" { + // Parse bare expression (not in parentheses) - ends at TYPE keyword + idx.Expression = p.parseExpression(ALIAS_PREC) + cmd.IndexExpr = idx.Expression } // Parse TYPE if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "TYPE" { @@ -4641,6 +5225,14 @@ func (p *Parser) parseAlterCommand() *ast.AlterCommand { p.nextToken() } } + // Parse AFTER + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "AFTER" { + p.nextToken() + if p.currentIs(token.IDENT) { + cmd.AfterIndex = p.current.Value + p.nextToken() + } + } cmd.IndexDef = idx } else if p.currentIs(token.CONSTRAINT) { cmd.Type = ast.AlterAddConstraint @@ -4911,6 +5503,24 @@ func (p *Parser) parseAlterCommand() *ast.AlterCommand { } cmd.Type = ast.AlterRemoveSampleBy } + } else if upper == "RESET" { + p.nextToken() // skip RESET + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "SETTING" { + p.nextToken() // skip SETTING + cmd.Type = ast.AlterResetSetting + // Parse comma-separated list of setting names + for { + if p.currentIs(token.IDENT) || p.current.Token.IsKeyword() { + cmd.ResetSettings = append(cmd.ResetSettings, p.current.Value) + p.nextToken() + } + if p.currentIs(token.COMMA) { + p.nextToken() + } else { + break + } + } + } } else { return nil } @@ -4984,6 +5594,16 @@ func (p *Parser) parseAlterCommand() *ast.AlterCommand { Position: p.current.Pos, Expression: p.parseExpression(LOWEST), } + // Skip RECOMPRESS CODEC(...) and other TTL modifiers + p.skipTTLModifiers() + // Parse additional TTL elements (comma-separated) + for p.currentIs(token.COMMA) { + p.nextToken() // skip comma + expr := p.parseExpression(LOWEST) + cmd.TTL.Expressions = append(cmd.TTL.Expressions, expr) + // Skip RECOMPRESS CODEC(...) if present + p.skipTTLModifiers() + } } else if p.currentIs(token.SETTINGS) || (p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "SETTING") { // Both SETTINGS and SETTING (singular) are accepted cmd.Type = ast.AlterModifySetting @@ -5206,6 +5826,27 @@ func (p *Parser) parseAlterCommand() *ast.AlterCommand { } p.nextToken() // skip comma } + // Handle IN PARTITION (UPDATE ... IN PARTITION WHERE ...) + // The expression parser may have incorrectly consumed "expr IN PARTITION" as an InExpression. + // Check if the last assignment value is an InExpression with right side being "PARTITION". 
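Illustrative sketch (plain string tokens) of the comma-separated name collection the new ALTER TABLE ... RESET SETTING branch uses to fill cmd.ResetSettings:

package main

import "fmt"

// collectResetSettings gathers bare setting names separated by commas,
// mirroring how the RESET SETTING branch fills cmd.ResetSettings.
func collectResetSettings(tokens []string) []string {
	var names []string
	for _, t := range tokens {
		if t == "," {
			continue
		}
		names = append(names, t)
	}
	return names
}

func main() {
	// ALTER TABLE t RESET SETTING max_part_loading_threads, max_parts_in_total
	fmt.Println(collectResetSettings([]string{"max_part_loading_threads", ",", "max_parts_in_total"}))
	// [max_part_loading_threads max_parts_in_total]
}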
+ if len(cmd.Assignments) > 0 { + lastAssign := cmd.Assignments[len(cmd.Assignments)-1] + if inExpr, ok := lastAssign.Value.(*ast.InExpr); ok && len(inExpr.List) == 1 { + if ident, ok := inExpr.List[0].(*ast.Identifier); ok && strings.ToUpper(ident.Name()) == "PARTITION" { + // Fix the mis-parse: the actual assignment value is the left side of IN + lastAssign.Value = inExpr.Expr + // Current token should be the partition expression (e.g., ALL) + cmd.Partition = p.parseExpression(LOWEST) + } + } + } + if p.currentIs(token.IN) { + p.nextToken() // skip IN + if p.currentIs(token.PARTITION) { + p.nextToken() // skip PARTITION + cmd.Partition = p.parseExpression(LOWEST) + } + } if p.currentIs(token.WHERE) { p.nextToken() // skip WHERE cmd.Where = p.parseExpression(LOWEST) @@ -5224,6 +5865,12 @@ func (p *Parser) parseTruncate() *ast.TruncateQuery { p.nextToken() // skip TRUNCATE + // Handle TEMPORARY keyword + if p.currentIs(token.TEMPORARY) { + trunc.Temporary = true + p.nextToken() + } + if p.currentIs(token.TABLE) { p.nextToken() } @@ -5308,6 +5955,18 @@ func (p *Parser) parseUndrop() *ast.UndropQuery { } } + // Handle FORMAT clause + if p.currentIs(token.FORMAT) { + p.nextToken() + if p.currentIs(token.NULL) { + undrop.Format = "Null" + p.nextToken() + } else if p.currentIs(token.IDENT) { + undrop.Format = p.current.Value + p.nextToken() + } + } + return undrop } @@ -5396,6 +6055,12 @@ func (p *Parser) parseDelete() *ast.DeleteQuery { del.Where = p.parseExpression(LOWEST) } + // Parse SETTINGS clause + if p.currentIs(token.SETTINGS) { + p.nextToken() // skip SETTINGS + del.Settings = p.parseSettingsList() + } + return del } @@ -5499,6 +6164,12 @@ func (p *Parser) parseShow() ast.Statement { Position: pos, } + // Handle TEMPORARY keyword (SHOW TEMPORARY TABLES) + if p.currentIs(token.TEMPORARY) { + show.Temporary = true + p.nextToken() + } + switch p.current.Token { case token.TABLES: show.ShowType = ast.ShowTables @@ -5554,7 +6225,11 @@ func (p *Parser) parseShow() ast.Statement { show.ShowType = ast.ShowCreateUser p.nextToken() // Skip user name and host pattern until FORMAT or end + // Also check for commas to detect multiple users for !p.currentIs(token.EOF) && !p.currentIs(token.SEMICOLON) && !p.currentIs(token.FORMAT) { + if p.currentIs(token.COMMA) { + show.MultipleUsers = true + } p.nextToken() } // Parse FORMAT clause if present @@ -5589,6 +6264,8 @@ func (p *Parser) parseShow() ast.Statement { show.ShowType = ast.ShowDictionaries case "FUNCTIONS": show.ShowType = ast.ShowFunctions + case "SETTING": + show.ShowType = ast.ShowSetting case "INDEXES", "INDICES", "KEYS": // SHOW INDEXES/INDICES/KEYS FROM table - treat as ShowColumns show.ShowType = ast.ShowColumns @@ -6205,9 +6882,10 @@ func (p *Parser) parseAttach() *ast.AttachQuery { p.nextToken() // skip ATTACH - // Check for DATABASE, TABLE, or DICTIONARY keyword + // Check for DATABASE, TABLE, DICTIONARY, or MATERIALIZED VIEW keyword isDatabase := false isDictionary := false + isMaterializedView := false if p.currentIs(token.DATABASE) { isDatabase = true p.nextToken() @@ -6216,6 +6894,13 @@ func (p *Parser) parseAttach() *ast.AttachQuery { } else if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "DICTIONARY" { isDictionary = true p.nextToken() + } else if p.currentIs(token.MATERIALIZED) { + p.nextToken() + if p.currentIs(token.VIEW) { + isMaterializedView = true + attach.IsMaterializedView = true + p.nextToken() + } } // Parse name (can be qualified: database.table for TABLE, not for DATABASE/DICTIONARY) @@ -6232,6 
+6917,32 @@ func (p *Parser) parseAttach() *ast.AttachQuery { attach.Table = name } + // Parse UUID clause (for ATTACH MATERIALIZED VIEW mv UUID 'uuid' ...) + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "UUID" { + p.nextToken() + if p.currentIs(token.STRING) { + attach.UUID = p.current.Value + p.nextToken() + } + } + + // Parse TO INNER UUID clause (for ATTACH MATERIALIZED VIEW mv UUID 'uuid' TO INNER UUID 'inner_uuid' ...) + if p.currentIs(token.TO) { + p.nextToken() + if p.currentIs(token.INNER) { + p.nextToken() + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "UUID" { + p.nextToken() + if p.currentIs(token.STRING) { + attach.InnerUUID = p.current.Value + p.nextToken() + } + } + } + } + + _ = isMaterializedView + // Parse column definitions for ATTACH TABLE name(col1 type, ...) if !isDatabase && p.currentIs(token.LPAREN) { p.nextToken() @@ -6286,9 +6997,14 @@ func (p *Parser) parseAttach() *ast.AttachQuery { attach.Engine = p.parseEngineClause() } - // Parse table options (ORDER BY, PRIMARY KEY) + // Parse table options (ORDER BY, PRIMARY KEY, PARTITION BY, AS SELECT) for { switch { + case p.currentIs(token.PARTITION): + p.nextToken() + if p.expect(token.BY) { + attach.PartitionBy = p.parseExpression(ALIAS_PREC) + } case p.currentIs(token.ORDER): p.nextToken() if p.expect(token.BY) { @@ -6331,6 +7047,12 @@ func (p *Parser) parseAttach() *ast.AttachQuery { attach.PrimaryKey = []ast.Expression{p.parseExpression(ALIAS_PREC)} } } + case p.currentIs(token.AS): + // AS SELECT clause for materialized views + p.nextToken() + if p.currentIs(token.SELECT) { + attach.SelectQuery = p.parseSelectWithUnion() + } default: return attach } @@ -6359,6 +7081,18 @@ func (p *Parser) parseCheck() *ast.CheckQuery { check.Table = tableName } + // Parse optional PARTITION clause + if p.currentIs(token.PARTITION) { + p.nextToken() // skip PARTITION + check.Partition = p.parseExpression(LOWEST) + } + + // Parse optional PART clause + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "PART" { + p.nextToken() // skip PART + check.Part = p.parseExpression(LOWEST) + } + // Parse optional FORMAT if p.currentIs(token.FORMAT) { p.nextToken() // skip FORMAT @@ -6426,6 +7160,15 @@ func (p *Parser) parseWindowDefinitions() []*ast.WindowDefinition { Position: p.current.Pos, } + // Check for named window reference (e.g., w1 as (w0 ORDER BY ...)) + if p.currentIs(token.IDENT) { + upper := strings.ToUpper(p.current.Value) + if upper != "PARTITION" && upper != "ORDER" && upper != "ROWS" && upper != "RANGE" && upper != "GROUPS" { + spec.Name = p.current.Value + p.nextToken() + } + } + // Parse PARTITION BY if p.currentIs(token.PARTITION) { p.nextToken() @@ -6769,6 +7512,12 @@ func (p *Parser) parseExistsStatement() *ast.ExistsQuery { p.nextToken() // skip EXISTS + // Check for TEMPORARY keyword + if p.currentIs(token.TEMPORARY) { + exists.Temporary = true + p.nextToken() + } + // Check for DICTIONARY, DATABASE, VIEW, or TABLE keyword if p.currentIs(token.TABLE) { exists.ExistsType = ast.ExistsTable @@ -6972,3 +7721,50 @@ func (p *Parser) parseTransactionControl() *ast.TransactionControlQuery { return query } + +// skipTTLModifiers skips TTL modifiers like RECOMPRESS CODEC(...), DELETE, TO DISK, TO VOLUME +func (p *Parser) skipTTLModifiers() { + for { + // Skip RECOMPRESS CODEC(...) 
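Illustrative sketch (simplified token slice) of the balanced-parenthesis skip that skipTTLModifiers, defined just below, uses to step over RECOMPRESS CODEC(...) without parsing its contents:

package main

import "fmt"

// skipParenGroup returns the index just past a balanced (...) group starting
// at tokens[start] == "(", using the same depth counter as skipTTLModifiers.
func skipParenGroup(tokens []string, start int) int {
	depth := 0
	i := start
	for ; i < len(tokens); i++ {
		switch tokens[i] {
		case "(":
			depth++
		case ")":
			depth--
			if depth == 0 {
				return i + 1
			}
		}
	}
	return i
}

func main() {
	// ... RECOMPRESS CODEC(ZSTD(1)), d + INTERVAL 7 DAY TO DISK 'cold'
	toks := []string{"(", "ZSTD", "(", "1", ")", ")", ",", "d"}
	fmt.Println(skipParenGroup(toks, 0)) // 6 -> next token is ","
}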
+ if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "RECOMPRESS" { + p.nextToken() // skip RECOMPRESS + if p.currentIs(token.IDENT) && strings.ToUpper(p.current.Value) == "CODEC" { + p.nextToken() // skip CODEC + if p.currentIs(token.LPAREN) { + // Skip the entire CODEC(...) call + depth := 1 + p.nextToken() // skip ( + for depth > 0 && !p.currentIs(token.EOF) { + if p.currentIs(token.LPAREN) { + depth++ + } else if p.currentIs(token.RPAREN) { + depth-- + } + p.nextToken() + } + } + } + continue + } + // Skip DELETE (TTL ... DELETE) + if p.currentIs(token.DELETE) { + p.nextToken() + continue + } + // Skip TO DISK 'name' or TO VOLUME 'name' + if p.currentIs(token.TO) { + p.nextToken() + if p.currentIs(token.IDENT) { + upper := strings.ToUpper(p.current.Value) + if upper == "DISK" || upper == "VOLUME" { + p.nextToken() + if p.currentIs(token.STRING) { + p.nextToken() + } + continue + } + } + } + break + } +} diff --git a/parser/testdata/00132_sets/metadata.json b/parser/testdata/00132_sets/metadata.json index c45b7602ba..0967ef424b 100644 --- a/parser/testdata/00132_sets/metadata.json +++ b/parser/testdata/00132_sets/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt12": true - } -} +{} diff --git a/parser/testdata/00300_csv/metadata.json b/parser/testdata/00300_csv/metadata.json index f7c9a031b3..2c63c08510 100644 --- a/parser/testdata/00300_csv/metadata.json +++ b/parser/testdata/00300_csv/metadata.json @@ -1,7 +1,2 @@ { - "explain_todo": { - "stmt1": true, - "stmt2": true, - "stmt3": true - } } diff --git a/parser/testdata/00348_tuples/metadata.json b/parser/testdata/00348_tuples/metadata.json index c45b7602ba..0967ef424b 100644 --- a/parser/testdata/00348_tuples/metadata.json +++ b/parser/testdata/00348_tuples/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt12": true - } -} +{} diff --git a/parser/testdata/00405_output_format_pretty_color/metadata.json b/parser/testdata/00405_output_format_pretty_color/metadata.json index 13a7459d03..0967ef424b 100644 --- a/parser/testdata/00405_output_format_pretty_color/metadata.json +++ b/parser/testdata/00405_output_format_pretty_color/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt13": true, - "stmt22": true, - "stmt4": true - } -} +{} diff --git a/parser/testdata/00492_drop_temporary_table/metadata.json b/parser/testdata/00492_drop_temporary_table/metadata.json index 92efb02376..0967ef424b 100644 --- a/parser/testdata/00492_drop_temporary_table/metadata.json +++ b/parser/testdata/00492_drop_temporary_table/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt6": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/00564_temporary_table_management/metadata.json b/parser/testdata/00564_temporary_table_management/metadata.json index d5e9483c45..2c63c08510 100644 --- a/parser/testdata/00564_temporary_table_management/metadata.json +++ b/parser/testdata/00564_temporary_table_management/metadata.json @@ -1,7 +1,2 @@ { - "explain_todo": { - "stmt3": true, - "stmt5": true, - "stmt7": true - } } diff --git a/parser/testdata/00653_running_difference/metadata.json b/parser/testdata/00653_running_difference/metadata.json index c86d2ec2f3..0967ef424b 100644 --- a/parser/testdata/00653_running_difference/metadata.json +++ b/parser/testdata/00653_running_difference/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt10": true, - "stmt12": true, - "stmt14": true, - "stmt16": true - } -} +{} diff --git a/parser/testdata/00670_truncate_temporary_table/metadata.json 
b/parser/testdata/00670_truncate_temporary_table/metadata.json index b563327205..0967ef424b 100644 --- a/parser/testdata/00670_truncate_temporary_table/metadata.json +++ b/parser/testdata/00670_truncate_temporary_table/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt7": true - } -} +{} diff --git a/parser/testdata/00700_decimal_null/metadata.json b/parser/testdata/00700_decimal_null/metadata.json index ef58f80315..0967ef424b 100644 --- a/parser/testdata/00700_decimal_null/metadata.json +++ b/parser/testdata/00700_decimal_null/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/00725_comment_columns_long/metadata.json b/parser/testdata/00725_comment_columns_long/metadata.json index 54ac61b35d..2da0074a29 100644 --- a/parser/testdata/00725_comment_columns_long/metadata.json +++ b/parser/testdata/00725_comment_columns_long/metadata.json @@ -1,9 +1,7 @@ { "explain_todo": { - "stmt13": true, "stmt19": true, "stmt21": true, - "stmt3": true, "stmt9": true } } diff --git a/parser/testdata/00727_concat/metadata.json b/parser/testdata/00727_concat/metadata.json index 898e7ce13d..ab953e9485 100644 --- a/parser/testdata/00727_concat/metadata.json +++ b/parser/testdata/00727_concat/metadata.json @@ -2,7 +2,6 @@ "explain_todo": { "stmt19": true, "stmt20": true, - "stmt44": true, - "stmt46": true + "stmt44": true } } diff --git a/parser/testdata/00753_alter_attach/metadata.json b/parser/testdata/00753_alter_attach/metadata.json index 72e6d5c7c6..0967ef424b 100644 --- a/parser/testdata/00753_alter_attach/metadata.json +++ b/parser/testdata/00753_alter_attach/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt53": true, - "stmt54": true, - "stmt55": true, - "stmt58": true - } -} +{} diff --git a/parser/testdata/00753_comment_columns_zookeeper/metadata.json b/parser/testdata/00753_comment_columns_zookeeper/metadata.json index ef58f80315..0967ef424b 100644 --- a/parser/testdata/00753_comment_columns_zookeeper/metadata.json +++ b/parser/testdata/00753_comment_columns_zookeeper/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/00821_distributed_storage_with_join_on/metadata.json b/parser/testdata/00821_distributed_storage_with_join_on/metadata.json index 1295a45747..0967ef424b 100644 --- a/parser/testdata/00821_distributed_storage_with_join_on/metadata.json +++ b/parser/testdata/00821_distributed_storage_with_join_on/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git a/parser/testdata/00836_indices_alter/metadata.json b/parser/testdata/00836_indices_alter/metadata.json index 8f6d4ba033..0967ef424b 100644 --- a/parser/testdata/00836_indices_alter/metadata.json +++ b/parser/testdata/00836_indices_alter/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt5": true, - "stmt6": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/00836_indices_alter_replicated_zookeeper_long/metadata.json b/parser/testdata/00836_indices_alter_replicated_zookeeper_long/metadata.json index 03776c0ca6..0967ef424b 100644 --- a/parser/testdata/00836_indices_alter_replicated_zookeeper_long/metadata.json +++ b/parser/testdata/00836_indices_alter_replicated_zookeeper_long/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt10": true, - "stmt11": true, - "stmt12": true, - "stmt35": true - } -} +{} diff --git a/parser/testdata/00915_simple_aggregate_function/metadata.json b/parser/testdata/00915_simple_aggregate_function/metadata.json index 
1295a45747..0967ef424b 100644 --- a/parser/testdata/00915_simple_aggregate_function/metadata.json +++ b/parser/testdata/00915_simple_aggregate_function/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git a/parser/testdata/00915_simple_aggregate_function_summing_merge_tree/metadata.json b/parser/testdata/00915_simple_aggregate_function_summing_merge_tree/metadata.json index 1295a45747..0967ef424b 100644 --- a/parser/testdata/00915_simple_aggregate_function_summing_merge_tree/metadata.json +++ b/parser/testdata/00915_simple_aggregate_function_summing_merge_tree/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git a/parser/testdata/00921_datetime64_basic/metadata.json b/parser/testdata/00921_datetime64_basic/metadata.json index b65b07d7a6..0967ef424b 100644 --- a/parser/testdata/00921_datetime64_basic/metadata.json +++ b/parser/testdata/00921_datetime64_basic/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt4": true - } -} +{} diff --git a/parser/testdata/00939_limit_by_offset/metadata.json b/parser/testdata/00939_limit_by_offset/metadata.json index b563327205..0967ef424b 100644 --- a/parser/testdata/00939_limit_by_offset/metadata.json +++ b/parser/testdata/00939_limit_by_offset/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt7": true - } -} +{} diff --git a/parser/testdata/00952_part_frozen_info/metadata.json b/parser/testdata/00952_part_frozen_info/metadata.json index 7ad5569408..0967ef424b 100644 --- a/parser/testdata/00952_part_frozen_info/metadata.json +++ b/parser/testdata/00952_part_frozen_info/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt9": true - } -} +{} diff --git a/parser/testdata/00954_resample_combinator/metadata.json b/parser/testdata/00954_resample_combinator/metadata.json index f3c2ec2542..0967ef424b 100644 --- a/parser/testdata/00954_resample_combinator/metadata.json +++ b/parser/testdata/00954_resample_combinator/metadata.json @@ -1 +1 @@ -{"explain_todo":{"stmt12":true,"stmt16":true,"stmt4":true,"stmt8":true}} +{} diff --git a/parser/testdata/00955_test_final_mark/metadata.json b/parser/testdata/00955_test_final_mark/metadata.json index b330691357..0967ef424b 100644 --- a/parser/testdata/00955_test_final_mark/metadata.json +++ b/parser/testdata/00955_test_final_mark/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt26": true - } -} +{} diff --git a/parser/testdata/00961_check_table/metadata.json b/parser/testdata/00961_check_table/metadata.json index 5395f06a45..0967ef424b 100644 --- a/parser/testdata/00961_check_table/metadata.json +++ b/parser/testdata/00961_check_table/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt20": true - } -} +{} diff --git a/parser/testdata/00980_merge_alter_settings/metadata.json b/parser/testdata/00980_merge_alter_settings/metadata.json index 2dad206ca7..0967ef424b 100644 --- a/parser/testdata/00980_merge_alter_settings/metadata.json +++ b/parser/testdata/00980_merge_alter_settings/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt35": true, - "stmt42": true, - "stmt43": true, - "stmt46": true - } -} +{} diff --git a/parser/testdata/00980_zookeeper_merge_tree_alter_settings/metadata.json b/parser/testdata/00980_zookeeper_merge_tree_alter_settings/metadata.json index 0356b75394..0967ef424b 100644 --- a/parser/testdata/00980_zookeeper_merge_tree_alter_settings/metadata.json +++ b/parser/testdata/00980_zookeeper_merge_tree_alter_settings/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt58": true, - "stmt59": 
true, - "stmt60": true - } -} +{} diff --git a/parser/testdata/01000_bad_size_of_marks_skip_idx/metadata.json b/parser/testdata/01000_bad_size_of_marks_skip_idx/metadata.json index b563327205..0967ef424b 100644 --- a/parser/testdata/01000_bad_size_of_marks_skip_idx/metadata.json +++ b/parser/testdata/01000_bad_size_of_marks_skip_idx/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt7": true - } -} +{} diff --git a/parser/testdata/01018_ddl_dictionaries_create/metadata.json b/parser/testdata/01018_ddl_dictionaries_create/metadata.json index aa5bbb6eb4..e9cc89b339 100644 --- a/parser/testdata/01018_ddl_dictionaries_create/metadata.json +++ b/parser/testdata/01018_ddl_dictionaries_create/metadata.json @@ -1,7 +1,6 @@ { "explain_todo": { "stmt17": true, - "stmt22": true, - "stmt43": true + "stmt22": true } } diff --git a/parser/testdata/01071_prohibition_secondary_index_with_old_format_merge_tree/metadata.json b/parser/testdata/01071_prohibition_secondary_index_with_old_format_merge_tree/metadata.json index bc5c6edb66..0967ef424b 100644 --- a/parser/testdata/01071_prohibition_secondary_index_with_old_format_merge_tree/metadata.json +++ b/parser/testdata/01071_prohibition_secondary_index_with_old_format_merge_tree/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt3": true, - "stmt5": true - } -} +{} diff --git a/parser/testdata/01109_exchange_tables/metadata.json b/parser/testdata/01109_exchange_tables/metadata.json index 2b8e98e7c1..6599b4f20b 100644 --- a/parser/testdata/01109_exchange_tables/metadata.json +++ b/parser/testdata/01109_exchange_tables/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt27": true, "stmt43": true, "stmt44": true } diff --git a/parser/testdata/01115_join_with_dictionary/metadata.json b/parser/testdata/01115_join_with_dictionary/metadata.json index 7ad5569408..0967ef424b 100644 --- a/parser/testdata/01115_join_with_dictionary/metadata.json +++ b/parser/testdata/01115_join_with_dictionary/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt9": true - } -} +{} diff --git a/parser/testdata/01125_dict_ddl_cannot_add_column/metadata.json b/parser/testdata/01125_dict_ddl_cannot_add_column/metadata.json index 1295a45747..0967ef424b 100644 --- a/parser/testdata/01125_dict_ddl_cannot_add_column/metadata.json +++ b/parser/testdata/01125_dict_ddl_cannot_add_column/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git a/parser/testdata/01152_cross_replication/metadata.json b/parser/testdata/01152_cross_replication/metadata.json index 9a0b394b40..0967ef424b 100644 --- a/parser/testdata/01152_cross_replication/metadata.json +++ b/parser/testdata/01152_cross_replication/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt11": true, - "stmt12": true, - "stmt7": true, - "stmt9": true - } -} +{} diff --git a/parser/testdata/01153_attach_mv_uuid/metadata.json b/parser/testdata/01153_attach_mv_uuid/metadata.json index d3de3d16d5..0967ef424b 100644 --- a/parser/testdata/01153_attach_mv_uuid/metadata.json +++ b/parser/testdata/01153_attach_mv_uuid/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt16": true, - "stmt26": true, - "stmt27": true, - "stmt37": true - } -} +{} diff --git a/parser/testdata/01213_alter_rename_primary_key_zookeeper_long/metadata.json b/parser/testdata/01213_alter_rename_primary_key_zookeeper_long/metadata.json index b563327205..0967ef424b 100644 --- a/parser/testdata/01213_alter_rename_primary_key_zookeeper_long/metadata.json +++ 
b/parser/testdata/01213_alter_rename_primary_key_zookeeper_long/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt7": true - } -} +{} diff --git a/parser/testdata/01257_dictionary_mismatch_types/metadata.json b/parser/testdata/01257_dictionary_mismatch_types/metadata.json index 5e06643b76..c45b7602ba 100644 --- a/parser/testdata/01257_dictionary_mismatch_types/metadata.json +++ b/parser/testdata/01257_dictionary_mismatch_types/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt12": true, - "stmt6": true + "stmt12": true } } diff --git a/parser/testdata/01292_create_user/metadata.json b/parser/testdata/01292_create_user/metadata.json index 0c27273b2d..3647a83c62 100644 --- a/parser/testdata/01292_create_user/metadata.json +++ b/parser/testdata/01292_create_user/metadata.json @@ -1,9 +1,5 @@ { "explain_todo": { - "stmt196": true, - "stmt197": true, - "stmt201": true, - "stmt221": true, "stmt239": true } } diff --git a/parser/testdata/01325_freeze_mutation_stuck/metadata.json b/parser/testdata/01325_freeze_mutation_stuck/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/01325_freeze_mutation_stuck/metadata.json +++ b/parser/testdata/01325_freeze_mutation_stuck/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} +{} diff --git a/parser/testdata/01392_column_resolve/metadata.json b/parser/testdata/01392_column_resolve/metadata.json index 682bda1cbc..0967ef424b 100644 --- a/parser/testdata/01392_column_resolve/metadata.json +++ b/parser/testdata/01392_column_resolve/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt1": true, - "stmt2": true - } -} +{} diff --git a/parser/testdata/01412_optimize_deduplicate_bug/metadata.json b/parser/testdata/01412_optimize_deduplicate_bug/metadata.json index 8556c3021f..0967ef424b 100644 --- a/parser/testdata/01412_optimize_deduplicate_bug/metadata.json +++ b/parser/testdata/01412_optimize_deduplicate_bug/metadata.json @@ -1 +1 @@ -{"explain_todo":{"stmt5":true}} +{} diff --git a/parser/testdata/01414_freeze_does_not_prevent_alters/metadata.json b/parser/testdata/01414_freeze_does_not_prevent_alters/metadata.json index 81d5c187a5..0967ef424b 100644 --- a/parser/testdata/01414_freeze_does_not_prevent_alters/metadata.json +++ b/parser/testdata/01414_freeze_does_not_prevent_alters/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt19": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/01455_default_compression/metadata.json b/parser/testdata/01455_default_compression/metadata.json index 546ade0b45..0967ef424b 100644 --- a/parser/testdata/01455_default_compression/metadata.json +++ b/parser/testdata/01455_default_compression/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt10": true, - "stmt2": true, - "stmt5": true - } -} +{} diff --git a/parser/testdata/01465_ttl_recompression/metadata.json b/parser/testdata/01465_ttl_recompression/metadata.json index e1bc1f2f25..0967ef424b 100644 --- a/parser/testdata/01465_ttl_recompression/metadata.json +++ b/parser/testdata/01465_ttl_recompression/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt12": true, - "stmt2": true, - "stmt20": true, - "stmt26": true - } -} +{} diff --git a/parser/testdata/01470_test_insert_select_asterisk/metadata.json b/parser/testdata/01470_test_insert_select_asterisk/metadata.json index 05747ff9e9..0967ef424b 100644 --- a/parser/testdata/01470_test_insert_select_asterisk/metadata.json +++ b/parser/testdata/01470_test_insert_select_asterisk/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - 
"stmt6": true, - "stmt7": true, - "stmt8": true, - "stmt9": true - } -} +{} diff --git a/parser/testdata/01493_alter_remove_properties/metadata.json b/parser/testdata/01493_alter_remove_properties/metadata.json index 182f4d0e03..7974f6a182 100644 --- a/parser/testdata/01493_alter_remove_properties/metadata.json +++ b/parser/testdata/01493_alter_remove_properties/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt2": true, "stmt24": true } } diff --git a/parser/testdata/01493_alter_remove_properties_zookeeper/metadata.json b/parser/testdata/01493_alter_remove_properties_zookeeper/metadata.json index c0a6c41fa3..85cc99e9fa 100644 --- a/parser/testdata/01493_alter_remove_properties_zookeeper/metadata.json +++ b/parser/testdata/01493_alter_remove_properties_zookeeper/metadata.json @@ -1,7 +1,5 @@ { "explain_todo": { - "stmt34": true, - "stmt4": true, - "stmt5": true + "stmt34": true } } diff --git a/parser/testdata/01504_compression_multiple_streams/metadata.json b/parser/testdata/01504_compression_multiple_streams/metadata.json index 07df712607..0967ef424b 100644 --- a/parser/testdata/01504_compression_multiple_streams/metadata.json +++ b/parser/testdata/01504_compression_multiple_streams/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt13": true, - "stmt20": true, - "stmt3": true, - "stmt30": true - } -} +{} diff --git a/parser/testdata/01526_complex_key_dict_direct_layout/metadata.json b/parser/testdata/01526_complex_key_dict_direct_layout/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/01526_complex_key_dict_direct_layout/metadata.json +++ b/parser/testdata/01526_complex_key_dict_direct_layout/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} +{} diff --git a/parser/testdata/01576_alias_column_rewrite/metadata.json b/parser/testdata/01576_alias_column_rewrite/metadata.json index 95cd2c2b48..0967ef424b 100644 --- a/parser/testdata/01576_alias_column_rewrite/metadata.json +++ b/parser/testdata/01576_alias_column_rewrite/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt42": true - } -} +{} diff --git a/parser/testdata/01581_deduplicate_by_columns_replicated_long/metadata.json b/parser/testdata/01581_deduplicate_by_columns_replicated_long/metadata.json index 19bb773cd3..0967ef424b 100644 --- a/parser/testdata/01581_deduplicate_by_columns_replicated_long/metadata.json +++ b/parser/testdata/01581_deduplicate_by_columns_replicated_long/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt13": true, - "stmt17": true, - "stmt18": true, - "stmt19": true - } -} +{} diff --git a/parser/testdata/01591_window_functions/metadata.json b/parser/testdata/01591_window_functions/metadata.json index 24cf4ae23e..2c63c08510 100644 --- a/parser/testdata/01591_window_functions/metadata.json +++ b/parser/testdata/01591_window_functions/metadata.json @@ -1,8 +1,2 @@ { - "explain_todo": { - "stmt107": true, - "stmt108": true, - "stmt109": true, - "stmt110": true - } } diff --git a/parser/testdata/01632_select_all_syntax/metadata.json b/parser/testdata/01632_select_all_syntax/metadata.json index 91296165a7..0967ef424b 100644 --- a/parser/testdata/01632_select_all_syntax/metadata.json +++ b/parser/testdata/01632_select_all_syntax/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt11": true, - "stmt15": true, - "stmt6": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/01651_map_functions/metadata.json b/parser/testdata/01651_map_functions/metadata.json index 8298ab280a..0967ef424b 100644 --- 
a/parser/testdata/01651_map_functions/metadata.json +++ b/parser/testdata/01651_map_functions/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt55": true - } -} +{} diff --git a/parser/testdata/01666_gcd_ubsan/metadata.json b/parser/testdata/01666_gcd_ubsan/metadata.json index bd1b4cdd81..0967ef424b 100644 --- a/parser/testdata/01666_gcd_ubsan/metadata.json +++ b/parser/testdata/01666_gcd_ubsan/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt6": true, - "stmt7": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/01666_lcm_ubsan/metadata.json b/parser/testdata/01666_lcm_ubsan/metadata.json index bd1b4cdd81..0967ef424b 100644 --- a/parser/testdata/01666_lcm_ubsan/metadata.json +++ b/parser/testdata/01666_lcm_ubsan/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt6": true, - "stmt7": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/01670_dictionary_create_key_expression/metadata.json b/parser/testdata/01670_dictionary_create_key_expression/metadata.json index 7ad5569408..0967ef424b 100644 --- a/parser/testdata/01670_dictionary_create_key_expression/metadata.json +++ b/parser/testdata/01670_dictionary_create_key_expression/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt9": true - } -} +{} diff --git a/parser/testdata/01676_range_hashed_dictionary/metadata.json b/parser/testdata/01676_range_hashed_dictionary/metadata.json index 00ef09ecbd..3a06a4a1ac 100644 --- a/parser/testdata/01676_range_hashed_dictionary/metadata.json +++ b/parser/testdata/01676_range_hashed_dictionary/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt33": true, "stmt5": true } } diff --git a/parser/testdata/01682_cache_dictionary_complex_key/metadata.json b/parser/testdata/01682_cache_dictionary_complex_key/metadata.json index fb2892e30a..0967ef424b 100644 --- a/parser/testdata/01682_cache_dictionary_complex_key/metadata.json +++ b/parser/testdata/01682_cache_dictionary_complex_key/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt27": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/01705_normalize_create_alter_function_names/metadata.json b/parser/testdata/01705_normalize_create_alter_function_names/metadata.json index 1295a45747..0967ef424b 100644 --- a/parser/testdata/01705_normalize_create_alter_function_names/metadata.json +++ b/parser/testdata/01705_normalize_create_alter_function_names/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git a/parser/testdata/01748_dictionary_table_dot/metadata.json b/parser/testdata/01748_dictionary_table_dot/metadata.json index afaaa4b0a6..7ad5569408 100644 --- a/parser/testdata/01748_dictionary_table_dot/metadata.json +++ b/parser/testdata/01748_dictionary_table_dot/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt7": true, "stmt9": true } } diff --git a/parser/testdata/01754_direct_dictionary_complex_key/metadata.json b/parser/testdata/01754_direct_dictionary_complex_key/metadata.json index fb2892e30a..0967ef424b 100644 --- a/parser/testdata/01754_direct_dictionary_complex_key/metadata.json +++ b/parser/testdata/01754_direct_dictionary_complex_key/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt27": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/01760_system_dictionaries/metadata.json b/parser/testdata/01760_system_dictionaries/metadata.json index 5395f06a45..0967ef424b 100644 --- a/parser/testdata/01760_system_dictionaries/metadata.json +++ b/parser/testdata/01760_system_dictionaries/metadata.json @@ -1,5 +1 @@ -{ - 
"explain_todo": { - "stmt20": true - } -} +{} diff --git a/parser/testdata/01766_hashed_dictionary_complex_key/metadata.json b/parser/testdata/01766_hashed_dictionary_complex_key/metadata.json index fb2892e30a..0967ef424b 100644 --- a/parser/testdata/01766_hashed_dictionary_complex_key/metadata.json +++ b/parser/testdata/01766_hashed_dictionary_complex_key/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt27": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/01780_dict_get_or_null/metadata.json b/parser/testdata/01780_dict_get_or_null/metadata.json index 28fe8b10d5..0967ef424b 100644 --- a/parser/testdata/01780_dict_get_or_null/metadata.json +++ b/parser/testdata/01780_dict_get_or_null/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt18": true, - "stmt29": true - } -} +{} diff --git a/parser/testdata/01785_dictionary_element_count/metadata.json b/parser/testdata/01785_dictionary_element_count/metadata.json index b09bea8db0..0967ef424b 100644 --- a/parser/testdata/01785_dictionary_element_count/metadata.json +++ b/parser/testdata/01785_dictionary_element_count/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt25": true - } -} +{} diff --git a/parser/testdata/01838_system_dictionaries_virtual_key_column/metadata.json b/parser/testdata/01838_system_dictionaries_virtual_key_column/metadata.json index dbdbb76d4f..0967ef424b 100644 --- a/parser/testdata/01838_system_dictionaries_virtual_key_column/metadata.json +++ b/parser/testdata/01838_system_dictionaries_virtual_key_column/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt6": true - } -} +{} diff --git a/parser/testdata/01852_cast_operator/metadata.json b/parser/testdata/01852_cast_operator/metadata.json index a38745fbb0..0967ef424b 100644 --- a/parser/testdata/01852_cast_operator/metadata.json +++ b/parser/testdata/01852_cast_operator/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt11": true, - "stmt12": true, - "stmt29": true - } -} +{} diff --git a/parser/testdata/01852_cast_operator_4/metadata.json b/parser/testdata/01852_cast_operator_4/metadata.json index b19e612cd1..0967ef424b 100644 --- a/parser/testdata/01852_cast_operator_4/metadata.json +++ b/parser/testdata/01852_cast_operator_4/metadata.json @@ -1 +1 @@ -{"explain_todo":{"stmt4":true,"stmt5":true,"stmt6":true,"stmt7":true}} +{} diff --git a/parser/testdata/01852_dictionary_query_count_long/metadata.json b/parser/testdata/01852_dictionary_query_count_long/metadata.json index c779e34356..2c63c08510 100644 --- a/parser/testdata/01852_dictionary_query_count_long/metadata.json +++ b/parser/testdata/01852_dictionary_query_count_long/metadata.json @@ -1,8 +1,2 @@ { - "explain_todo": { - "stmt54": true, - "stmt62": true, - "stmt70": true, - "stmt83": true - } } diff --git a/parser/testdata/01854_dictionary_range_hashed_min_max_attr/metadata.json b/parser/testdata/01854_dictionary_range_hashed_min_max_attr/metadata.json index ef58f80315..0967ef424b 100644 --- a/parser/testdata/01854_dictionary_range_hashed_min_max_attr/metadata.json +++ b/parser/testdata/01854_dictionary_range_hashed_min_max_attr/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/01881_negate_formatting/metadata.json b/parser/testdata/01881_negate_formatting/metadata.json index 763e0697ae..0967ef424b 100644 --- a/parser/testdata/01881_negate_formatting/metadata.json +++ b/parser/testdata/01881_negate_formatting/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt2": true, - "stmt3": true, - "stmt4": true, 
- "stmt6": true - } -} +{} diff --git a/parser/testdata/01883_with_grouping_sets/metadata.json b/parser/testdata/01883_with_grouping_sets/metadata.json index 7ad5569408..0967ef424b 100644 --- a/parser/testdata/01883_with_grouping_sets/metadata.json +++ b/parser/testdata/01883_with_grouping_sets/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt9": true - } -} +{} diff --git a/parser/testdata/01902_dictionary_array_type/metadata.json b/parser/testdata/01902_dictionary_array_type/metadata.json index 28a683eda9..0967ef424b 100644 --- a/parser/testdata/01902_dictionary_array_type/metadata.json +++ b/parser/testdata/01902_dictionary_array_type/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt58": true - } -} +{} diff --git a/parser/testdata/01904_dictionary_default_nullable_type/metadata.json b/parser/testdata/01904_dictionary_default_nullable_type/metadata.json index ca4ce64f93..0967ef424b 100644 --- a/parser/testdata/01904_dictionary_default_nullable_type/metadata.json +++ b/parser/testdata/01904_dictionary_default_nullable_type/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt78": true - } -} +{} diff --git a/parser/testdata/01932_alter_index_with_order/metadata.json b/parser/testdata/01932_alter_index_with_order/metadata.json index 19830977ac..0f293987f1 100644 --- a/parser/testdata/01932_alter_index_with_order/metadata.json +++ b/parser/testdata/01932_alter_index_with_order/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt4": true, "stmt5": true, "stmt6": true } diff --git a/parser/testdata/02001_select_with_filter/metadata.json b/parser/testdata/02001_select_with_filter/metadata.json index 6ed702cc94..0967ef424b 100644 --- a/parser/testdata/02001_select_with_filter/metadata.json +++ b/parser/testdata/02001_select_with_filter/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt1": true, - "stmt2": true, - "stmt3": true, - "stmt4": true - } -} +{} diff --git a/parser/testdata/02008_complex_key_range_hashed_dictionary/metadata.json b/parser/testdata/02008_complex_key_range_hashed_dictionary/metadata.json index 5d573a9c5d..0967ef424b 100644 --- a/parser/testdata/02008_complex_key_range_hashed_dictionary/metadata.json +++ b/parser/testdata/02008_complex_key_range_hashed_dictionary/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt35": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/02025_nested_func_for_if_combinator/metadata.json b/parser/testdata/02025_nested_func_for_if_combinator/metadata.json index 7b4ddafa53..0967ef424b 100644 --- a/parser/testdata/02025_nested_func_for_if_combinator/metadata.json +++ b/parser/testdata/02025_nested_func_for_if_combinator/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt3": true, - "stmt4": true - } -} +{} diff --git a/parser/testdata/02026_describe_include_subcolumns/metadata.json b/parser/testdata/02026_describe_include_subcolumns/metadata.json index bc5c6edb66..3a06a4a1ac 100644 --- a/parser/testdata/02026_describe_include_subcolumns/metadata.json +++ b/parser/testdata/02026_describe_include_subcolumns/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt3": true, "stmt5": true } } diff --git a/parser/testdata/02028_system_data_skipping_indices_size/metadata.json b/parser/testdata/02028_system_data_skipping_indices_size/metadata.json index dbdbb76d4f..0967ef424b 100644 --- a/parser/testdata/02028_system_data_skipping_indices_size/metadata.json +++ b/parser/testdata/02028_system_data_skipping_indices_size/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt6": true - } -} 
+{} diff --git a/parser/testdata/02067_lost_part_s3/metadata.json b/parser/testdata/02067_lost_part_s3/metadata.json index 342b3ff5b4..0967ef424b 100644 --- a/parser/testdata/02067_lost_part_s3/metadata.json +++ b/parser/testdata/02067_lost_part_s3/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt8": true - } -} +{} diff --git a/parser/testdata/02097_remove_sample_by/metadata.json b/parser/testdata/02097_remove_sample_by/metadata.json index 7b4455cd5f..0967ef424b 100644 --- a/parser/testdata/02097_remove_sample_by/metadata.json +++ b/parser/testdata/02097_remove_sample_by/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt16": true - } -} +{} diff --git a/parser/testdata/02162_range_hashed_dictionary_ddl_expression/metadata.json b/parser/testdata/02162_range_hashed_dictionary_ddl_expression/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/02162_range_hashed_dictionary_ddl_expression/metadata.json +++ b/parser/testdata/02162_range_hashed_dictionary_ddl_expression/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} +{} diff --git a/parser/testdata/02169_map_functions/metadata.json b/parser/testdata/02169_map_functions/metadata.json index b09f4864be..0967ef424b 100644 --- a/parser/testdata/02169_map_functions/metadata.json +++ b/parser/testdata/02169_map_functions/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt65": true - } -} +{} diff --git a/parser/testdata/02176_dict_get_has_implicit_key_cast/metadata.json b/parser/testdata/02176_dict_get_has_implicit_key_cast/metadata.json index aa28559472..0967ef424b 100644 --- a/parser/testdata/02176_dict_get_has_implicit_key_cast/metadata.json +++ b/parser/testdata/02176_dict_get_has_implicit_key_cast/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt18": true - } -} +{} diff --git a/parser/testdata/02179_range_hashed_dictionary_invalid_interval/metadata.json b/parser/testdata/02179_range_hashed_dictionary_invalid_interval/metadata.json index b563327205..0967ef424b 100644 --- a/parser/testdata/02179_range_hashed_dictionary_invalid_interval/metadata.json +++ b/parser/testdata/02179_range_hashed_dictionary_invalid_interval/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt7": true - } -} +{} diff --git a/parser/testdata/02183_dictionary_date_types/metadata.json b/parser/testdata/02183_dictionary_date_types/metadata.json index 8298ab280a..0967ef424b 100644 --- a/parser/testdata/02183_dictionary_date_types/metadata.json +++ b/parser/testdata/02183_dictionary_date_types/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt55": true - } -} +{} diff --git a/parser/testdata/02183_dictionary_no_attributes/metadata.json b/parser/testdata/02183_dictionary_no_attributes/metadata.json index be6c39b1cf..0967ef424b 100644 --- a/parser/testdata/02183_dictionary_no_attributes/metadata.json +++ b/parser/testdata/02183_dictionary_no_attributes/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt73": true - } -} +{} diff --git a/parser/testdata/02184_range_hashed_dictionary_outside_range_values/metadata.json b/parser/testdata/02184_range_hashed_dictionary_outside_range_values/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/02184_range_hashed_dictionary_outside_range_values/metadata.json +++ b/parser/testdata/02184_range_hashed_dictionary_outside_range_values/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} +{} diff --git a/parser/testdata/02185_range_hashed_dictionary_open_ranges/metadata.json 
b/parser/testdata/02185_range_hashed_dictionary_open_ranges/metadata.json index 6dc0aa1ce2..0967ef424b 100644 --- a/parser/testdata/02185_range_hashed_dictionary_open_ranges/metadata.json +++ b/parser/testdata/02185_range_hashed_dictionary_open_ranges/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt17": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/02186_range_hashed_dictionary_intersecting_intervals/metadata.json b/parser/testdata/02186_range_hashed_dictionary_intersecting_intervals/metadata.json index bd82208299..0967ef424b 100644 --- a/parser/testdata/02186_range_hashed_dictionary_intersecting_intervals/metadata.json +++ b/parser/testdata/02186_range_hashed_dictionary_intersecting_intervals/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt16": true, - "stmt9": true - } -} +{} diff --git a/parser/testdata/02188_parser_dictionary_primary_key/metadata.json b/parser/testdata/02188_parser_dictionary_primary_key/metadata.json index d4d1d99f95..0967ef424b 100644 --- a/parser/testdata/02188_parser_dictionary_primary_key/metadata.json +++ b/parser/testdata/02188_parser_dictionary_primary_key/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt14": true - } -} +{} diff --git a/parser/testdata/02234_column_function_short_circuit/metadata.json b/parser/testdata/02234_column_function_short_circuit/metadata.json index dbdbb76d4f..0967ef424b 100644 --- a/parser/testdata/02234_column_function_short_circuit/metadata.json +++ b/parser/testdata/02234_column_function_short_circuit/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt6": true - } -} +{} diff --git a/parser/testdata/02252_reset_non_existing_setting/metadata.json b/parser/testdata/02252_reset_non_existing_setting/metadata.json index 1295a45747..0967ef424b 100644 --- a/parser/testdata/02252_reset_non_existing_setting/metadata.json +++ b/parser/testdata/02252_reset_non_existing_setting/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git a/parser/testdata/02286_quantile_tdigest_infinity/metadata.json b/parser/testdata/02286_quantile_tdigest_infinity/metadata.json index c211f242cc..0967ef424b 100644 --- a/parser/testdata/02286_quantile_tdigest_infinity/metadata.json +++ b/parser/testdata/02286_quantile_tdigest_infinity/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt22": true, - "stmt23": true, - "stmt24": true - } -} +{} diff --git a/parser/testdata/02286_tuple_numeric_identifier/metadata.json b/parser/testdata/02286_tuple_numeric_identifier/metadata.json index 2ef6a38008..0967ef424b 100644 --- a/parser/testdata/02286_tuple_numeric_identifier/metadata.json +++ b/parser/testdata/02286_tuple_numeric_identifier/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt15": true, - "stmt8": true, - "stmt9": true - } -} +{} diff --git a/parser/testdata/02293_grouping_function/metadata.json b/parser/testdata/02293_grouping_function/metadata.json index b563327205..0967ef424b 100644 --- a/parser/testdata/02293_grouping_function/metadata.json +++ b/parser/testdata/02293_grouping_function/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt7": true - } -} +{} diff --git a/parser/testdata/02311_range_hashed_dictionary_range_cast/metadata.json b/parser/testdata/02311_range_hashed_dictionary_range_cast/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/02311_range_hashed_dictionary_range_cast/metadata.json +++ b/parser/testdata/02311_range_hashed_dictionary_range_cast/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} 
+{} diff --git a/parser/testdata/02315_grouping_constant_folding/metadata.json b/parser/testdata/02315_grouping_constant_folding/metadata.json index 943b275814..0967ef424b 100644 --- a/parser/testdata/02315_grouping_constant_folding/metadata.json +++ b/parser/testdata/02315_grouping_constant_folding/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt4": true, - "stmt6": true - } -} +{} diff --git a/parser/testdata/02319_dict_get_check_arguments_size/metadata.json b/parser/testdata/02319_dict_get_check_arguments_size/metadata.json index 62b81668c3..0967ef424b 100644 --- a/parser/testdata/02319_dict_get_check_arguments_size/metadata.json +++ b/parser/testdata/02319_dict_get_check_arguments_size/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt13": true - } -} +{} diff --git a/parser/testdata/02346_exclude_materialize_skip_indexes_on_insert/metadata.json b/parser/testdata/02346_exclude_materialize_skip_indexes_on_insert/metadata.json index dbdbb76d4f..0967ef424b 100644 --- a/parser/testdata/02346_exclude_materialize_skip_indexes_on_insert/metadata.json +++ b/parser/testdata/02346_exclude_materialize_skip_indexes_on_insert/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt6": true - } -} +{} diff --git a/parser/testdata/02346_exclude_materialize_skip_indexes_on_merge/metadata.json b/parser/testdata/02346_exclude_materialize_skip_indexes_on_merge/metadata.json index ab9202e88e..0967ef424b 100644 --- a/parser/testdata/02346_exclude_materialize_skip_indexes_on_merge/metadata.json +++ b/parser/testdata/02346_exclude_materialize_skip_indexes_on_merge/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt11": true - } -} +{} diff --git a/parser/testdata/02346_text_index_experimental_flag/metadata.json b/parser/testdata/02346_text_index_experimental_flag/metadata.json index f649a7df09..0967ef424b 100644 --- a/parser/testdata/02346_text_index_experimental_flag/metadata.json +++ b/parser/testdata/02346_text_index_experimental_flag/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt13": true, - "stmt9": true - } -} +{} diff --git a/parser/testdata/02346_text_index_function_hasAnyAllTokens_partially_materialized/metadata.json b/parser/testdata/02346_text_index_function_hasAnyAllTokens_partially_materialized/metadata.json index 342b3ff5b4..0967ef424b 100644 --- a/parser/testdata/02346_text_index_function_hasAnyAllTokens_partially_materialized/metadata.json +++ b/parser/testdata/02346_text_index_function_hasAnyAllTokens_partially_materialized/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt8": true - } -} +{} diff --git a/parser/testdata/02346_text_index_hits/metadata.json b/parser/testdata/02346_text_index_hits/metadata.json index 0f293987f1..0967ef424b 100644 --- a/parser/testdata/02346_text_index_hits/metadata.json +++ b/parser/testdata/02346_text_index_hits/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt5": true, - "stmt6": true - } -} +{} diff --git a/parser/testdata/02354_vector_search_postfiltering_bug/metadata.json b/parser/testdata/02354_vector_search_postfiltering_bug/metadata.json index b65b07d7a6..0967ef424b 100644 --- a/parser/testdata/02354_vector_search_postfiltering_bug/metadata.json +++ b/parser/testdata/02354_vector_search_postfiltering_bug/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt4": true - } -} +{} diff --git a/parser/testdata/02354_vector_search_rescoring/metadata.json b/parser/testdata/02354_vector_search_rescoring/metadata.json index 6f9b7382d4..0967ef424b 100644 --- 
a/parser/testdata/02354_vector_search_rescoring/metadata.json +++ b/parser/testdata/02354_vector_search_rescoring/metadata.json @@ -1,9 +1 @@ -{ - "explain_todo": { - "stmt12": true, - "stmt16": true, - "stmt19": true, - "stmt25": true, - "stmt9": true - } -} +{} diff --git a/parser/testdata/02354_vector_search_rescoring_and_prewhere/metadata.json b/parser/testdata/02354_vector_search_rescoring_and_prewhere/metadata.json index fdd68b9a1d..0967ef424b 100644 --- a/parser/testdata/02354_vector_search_rescoring_and_prewhere/metadata.json +++ b/parser/testdata/02354_vector_search_rescoring_and_prewhere/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt16": true, - "stmt22": true - } -} +{} diff --git a/parser/testdata/02378_analyzer_projection_names/metadata.json b/parser/testdata/02378_analyzer_projection_names/metadata.json index 6256fd41ef..277764f7c2 100644 --- a/parser/testdata/02378_analyzer_projection_names/metadata.json +++ b/parser/testdata/02378_analyzer_projection_names/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt185": true, "stmt67": true } } diff --git a/parser/testdata/02391_hashed_dictionary_shards/metadata.json b/parser/testdata/02391_hashed_dictionary_shards/metadata.json index 7974f6a182..0967ef424b 100644 --- a/parser/testdata/02391_hashed_dictionary_shards/metadata.json +++ b/parser/testdata/02391_hashed_dictionary_shards/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt24": true - } -} +{} diff --git a/parser/testdata/02416_grouping_function_compatibility/metadata.json b/parser/testdata/02416_grouping_function_compatibility/metadata.json index b65b07d7a6..0967ef424b 100644 --- a/parser/testdata/02416_grouping_function_compatibility/metadata.json +++ b/parser/testdata/02416_grouping_function_compatibility/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt4": true - } -} +{} diff --git a/parser/testdata/02417_from_select_syntax/metadata.json b/parser/testdata/02417_from_select_syntax/metadata.json index fffcb7d38b..2c63c08510 100644 --- a/parser/testdata/02417_from_select_syntax/metadata.json +++ b/parser/testdata/02417_from_select_syntax/metadata.json @@ -1,7 +1,2 @@ { - "explain_todo": { - "stmt2": true, - "stmt3": true, - "stmt4": true - } } diff --git a/parser/testdata/02428_delete_with_settings/metadata.json b/parser/testdata/02428_delete_with_settings/metadata.json index b65b07d7a6..0967ef424b 100644 --- a/parser/testdata/02428_delete_with_settings/metadata.json +++ b/parser/testdata/02428_delete_with_settings/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt4": true - } -} +{} diff --git a/parser/testdata/02455_improve_feedback_when_replacing_partition_with_different_primary_key/metadata.json b/parser/testdata/02455_improve_feedback_when_replacing_partition_with_different_primary_key/metadata.json index e9d6e46171..0967ef424b 100644 --- a/parser/testdata/02455_improve_feedback_when_replacing_partition_with_different_primary_key/metadata.json +++ b/parser/testdata/02455_improve_feedback_when_replacing_partition_with_different_primary_key/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt1": true - } -} +{} diff --git a/parser/testdata/02457_tuple_of_intervals/metadata.json b/parser/testdata/02457_tuple_of_intervals/metadata.json index 60bbc44971..0967ef424b 100644 --- a/parser/testdata/02457_tuple_of_intervals/metadata.json +++ b/parser/testdata/02457_tuple_of_intervals/metadata.json @@ -1,9 +1 @@ -{ - "explain_todo": { - "stmt34": true, - "stmt35": true, - "stmt36": true, - "stmt37": true, - "stmt38": true - } -} 
+{} diff --git a/parser/testdata/02478_projection_and_alter_low_cardinality/metadata.json b/parser/testdata/02478_projection_and_alter_low_cardinality/metadata.json index ef58f80315..0967ef424b 100644 --- a/parser/testdata/02478_projection_and_alter_low_cardinality/metadata.json +++ b/parser/testdata/02478_projection_and_alter_low_cardinality/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/02487_create_index_normalize_functions/metadata.json b/parser/testdata/02487_create_index_normalize_functions/metadata.json index bc141058a4..1295a45747 100644 --- a/parser/testdata/02487_create_index_normalize_functions/metadata.json +++ b/parser/testdata/02487_create_index_normalize_functions/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt2": true, "stmt3": true } } diff --git a/parser/testdata/02525_range_hashed_dictionary_update_field/metadata.json b/parser/testdata/02525_range_hashed_dictionary_update_field/metadata.json index b65b07d7a6..0967ef424b 100644 --- a/parser/testdata/02525_range_hashed_dictionary_update_field/metadata.json +++ b/parser/testdata/02525_range_hashed_dictionary_update_field/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt4": true - } -} +{} diff --git a/parser/testdata/02581_share_big_sets_between_multiple_mutations_tasks_long/metadata.json b/parser/testdata/02581_share_big_sets_between_multiple_mutations_tasks_long/metadata.json index 7b4455cd5f..0967ef424b 100644 --- a/parser/testdata/02581_share_big_sets_between_multiple_mutations_tasks_long/metadata.json +++ b/parser/testdata/02581_share_big_sets_between_multiple_mutations_tasks_long/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt16": true - } -} +{} diff --git a/parser/testdata/02681_undrop_query/metadata.json b/parser/testdata/02681_undrop_query/metadata.json index 3457d8d58d..0967ef424b 100644 --- a/parser/testdata/02681_undrop_query/metadata.json +++ b/parser/testdata/02681_undrop_query/metadata.json @@ -1,9 +1 @@ -{ - "explain_todo": { - "stmt22": true, - "stmt27": true, - "stmt31": true, - "stmt36": true, - "stmt38": true - } -} +{} diff --git a/parser/testdata/02706_show_columns/metadata.json b/parser/testdata/02706_show_columns/metadata.json index 6fb75fafb6..25122ac4f4 100644 --- a/parser/testdata/02706_show_columns/metadata.json +++ b/parser/testdata/02706_show_columns/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt2": true, "stmt5": true, "stmt9": true } diff --git a/parser/testdata/02716_drop_if_empty/metadata.json b/parser/testdata/02716_drop_if_empty/metadata.json index 31cb028fb1..0967ef424b 100644 --- a/parser/testdata/02716_drop_if_empty/metadata.json +++ b/parser/testdata/02716_drop_if_empty/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt10": true, - "stmt11": true, - "stmt13": true - } -} +{} diff --git a/parser/testdata/02717_pretty_json/metadata.json b/parser/testdata/02717_pretty_json/metadata.json index e9d6e46171..0967ef424b 100644 --- a/parser/testdata/02717_pretty_json/metadata.json +++ b/parser/testdata/02717_pretty_json/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt1": true - } -} +{} diff --git a/parser/testdata/02731_auto_convert_dictionary_layout_to_complex_by_complex_keys/metadata.json b/parser/testdata/02731_auto_convert_dictionary_layout_to_complex_by_complex_keys/metadata.json index 7974f6a182..0967ef424b 100644 --- a/parser/testdata/02731_auto_convert_dictionary_layout_to_complex_by_complex_keys/metadata.json +++ 
b/parser/testdata/02731_auto_convert_dictionary_layout_to_complex_by_complex_keys/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt24": true - } -} +{} diff --git a/parser/testdata/02735_parquet_encoder/metadata.json b/parser/testdata/02735_parquet_encoder/metadata.json index fd942e39e1..0967ef424b 100644 --- a/parser/testdata/02735_parquet_encoder/metadata.json +++ b/parser/testdata/02735_parquet_encoder/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt70": true, - "stmt72": true, - "stmt74": true - } -} +{} diff --git a/parser/testdata/02740_hashed_dictionary_load_factor_smoke/metadata.json b/parser/testdata/02740_hashed_dictionary_load_factor_smoke/metadata.json index 05f2588d5d..0967ef424b 100644 --- a/parser/testdata/02740_hashed_dictionary_load_factor_smoke/metadata.json +++ b/parser/testdata/02740_hashed_dictionary_load_factor_smoke/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt31": true - } -} +{} diff --git a/parser/testdata/02775_show_columns_called_from_clickhouse/metadata.json b/parser/testdata/02775_show_columns_called_from_clickhouse/metadata.json index b65b07d7a6..0967ef424b 100644 --- a/parser/testdata/02775_show_columns_called_from_clickhouse/metadata.json +++ b/parser/testdata/02775_show_columns_called_from_clickhouse/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt4": true - } -} +{} diff --git a/parser/testdata/02790_fix_coredump_when_compile_expression/metadata.json b/parser/testdata/02790_fix_coredump_when_compile_expression/metadata.json index e9d6e46171..0967ef424b 100644 --- a/parser/testdata/02790_fix_coredump_when_compile_expression/metadata.json +++ b/parser/testdata/02790_fix_coredump_when_compile_expression/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt1": true - } -} +{} diff --git a/parser/testdata/02791_final_block_structure_mismatch_bug/metadata.json b/parser/testdata/02791_final_block_structure_mismatch_bug/metadata.json index 23f93c7c50..0967ef424b 100644 --- a/parser/testdata/02791_final_block_structure_mismatch_bug/metadata.json +++ b/parser/testdata/02791_final_block_structure_mismatch_bug/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt14": true, - "stmt26": true - } -} +{} diff --git a/parser/testdata/02798_explain_settings_not_applied_bug/metadata.json b/parser/testdata/02798_explain_settings_not_applied_bug/metadata.json index 342b3ff5b4..0967ef424b 100644 --- a/parser/testdata/02798_explain_settings_not_applied_bug/metadata.json +++ b/parser/testdata/02798_explain_settings_not_applied_bug/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt8": true - } -} +{} diff --git a/parser/testdata/02815_range_dict_no_direct_join/metadata.json b/parser/testdata/02815_range_dict_no_direct_join/metadata.json index 342b3ff5b4..0967ef424b 100644 --- a/parser/testdata/02815_range_dict_no_direct_join/metadata.json +++ b/parser/testdata/02815_range_dict_no_direct_join/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt8": true - } -} +{} diff --git a/parser/testdata/02835_nested_array_lowcardinality/metadata.json b/parser/testdata/02835_nested_array_lowcardinality/metadata.json index 62b81668c3..0967ef424b 100644 --- a/parser/testdata/02835_nested_array_lowcardinality/metadata.json +++ b/parser/testdata/02835_nested_array_lowcardinality/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt13": true - } -} +{} diff --git a/parser/testdata/02841_parallel_final_wrong_columns_order/metadata.json b/parser/testdata/02841_parallel_final_wrong_columns_order/metadata.json index 
ef58f80315..0967ef424b 100644 --- a/parser/testdata/02841_parallel_final_wrong_columns_order/metadata.json +++ b/parser/testdata/02841_parallel_final_wrong_columns_order/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/02867_create_user_ssh/metadata.json b/parser/testdata/02867_create_user_ssh/metadata.json index 3455a1864a..0967ef424b 100644 --- a/parser/testdata/02867_create_user_ssh/metadata.json +++ b/parser/testdata/02867_create_user_ssh/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt2": true, - "stmt3": true, - "stmt4": true, - "stmt5": true - } -} +{} diff --git a/parser/testdata/02875_final_invalid_read_ranges_bug/metadata.json b/parser/testdata/02875_final_invalid_read_ranges_bug/metadata.json index ef58f80315..0967ef424b 100644 --- a/parser/testdata/02875_final_invalid_read_ranges_bug/metadata.json +++ b/parser/testdata/02875_final_invalid_read_ranges_bug/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/02890_describe_table_options/metadata.json b/parser/testdata/02890_describe_table_options/metadata.json index 1295a45747..0967ef424b 100644 --- a/parser/testdata/02890_describe_table_options/metadata.json +++ b/parser/testdata/02890_describe_table_options/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git a/parser/testdata/02905_show_setting_query/metadata.json b/parser/testdata/02905_show_setting_query/metadata.json index 6da6ddac9a..2c63c08510 100644 --- a/parser/testdata/02905_show_setting_query/metadata.json +++ b/parser/testdata/02905_show_setting_query/metadata.json @@ -1,7 +1,2 @@ { - "explain_todo": { - "stmt2": true, - "stmt4": true, - "stmt5": true - } } diff --git a/parser/testdata/02908_empty_named_collection/metadata.json b/parser/testdata/02908_empty_named_collection/metadata.json index f7c9a031b3..0967ef424b 100644 --- a/parser/testdata/02908_empty_named_collection/metadata.json +++ b/parser/testdata/02908_empty_named_collection/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt1": true, - "stmt2": true, - "stmt3": true - } -} +{} diff --git a/parser/testdata/02908_filesystem_cache_as_collection/metadata.json b/parser/testdata/02908_filesystem_cache_as_collection/metadata.json index e9d6e46171..0967ef424b 100644 --- a/parser/testdata/02908_filesystem_cache_as_collection/metadata.json +++ b/parser/testdata/02908_filesystem_cache_as_collection/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt1": true - } -} +{} diff --git a/parser/testdata/02911_add_index_and_materialize_index/metadata.json b/parser/testdata/02911_add_index_and_materialize_index/metadata.json index 1295a45747..0967ef424b 100644 --- a/parser/testdata/02911_add_index_and_materialize_index/metadata.json +++ b/parser/testdata/02911_add_index_and_materialize_index/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git a/parser/testdata/02918_fuzzjson_table_function/metadata.json b/parser/testdata/02918_fuzzjson_table_function/metadata.json index 682bda1cbc..0967ef424b 100644 --- a/parser/testdata/02918_fuzzjson_table_function/metadata.json +++ b/parser/testdata/02918_fuzzjson_table_function/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt1": true, - "stmt2": true - } -} +{} diff --git a/parser/testdata/02932_query_settings_max_size_drop/metadata.json b/parser/testdata/02932_query_settings_max_size_drop/metadata.json index ef58f80315..0967ef424b 100644 --- 
a/parser/testdata/02932_query_settings_max_size_drop/metadata.json +++ b/parser/testdata/02932_query_settings_max_size_drop/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/02932_query_settings_max_size_drop_rmt/metadata.json b/parser/testdata/02932_query_settings_max_size_drop_rmt/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/02932_query_settings_max_size_drop_rmt/metadata.json +++ b/parser/testdata/02932_query_settings_max_size_drop_rmt/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} +{} diff --git a/parser/testdata/02935_format_with_arbitrary_types/metadata.json b/parser/testdata/02935_format_with_arbitrary_types/metadata.json index 898e7ce13d..ab953e9485 100644 --- a/parser/testdata/02935_format_with_arbitrary_types/metadata.json +++ b/parser/testdata/02935_format_with_arbitrary_types/metadata.json @@ -2,7 +2,6 @@ "explain_todo": { "stmt19": true, "stmt20": true, - "stmt44": true, - "stmt46": true + "stmt44": true } } diff --git a/parser/testdata/02950_dictionary_short_circuit/metadata.json b/parser/testdata/02950_dictionary_short_circuit/metadata.json index 7bf4b04abe..0967ef424b 100644 --- a/parser/testdata/02950_dictionary_short_circuit/metadata.json +++ b/parser/testdata/02950_dictionary_short_circuit/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt33": true - } -} +{} diff --git a/parser/testdata/02968_projection_merge/metadata.json b/parser/testdata/02968_projection_merge/metadata.json index 7bf4b04abe..0967ef424b 100644 --- a/parser/testdata/02968_projection_merge/metadata.json +++ b/parser/testdata/02968_projection_merge/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt33": true - } -} +{} diff --git a/parser/testdata/02982_create_mv_inner_extra/metadata.json b/parser/testdata/02982_create_mv_inner_extra/metadata.json index daf05a4474..0967ef424b 100644 --- a/parser/testdata/02982_create_mv_inner_extra/metadata.json +++ b/parser/testdata/02982_create_mv_inner_extra/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt10": true, - "stmt11": true, - "stmt8": true, - "stmt9": true - } -} +{} diff --git a/parser/testdata/02985_minmax_index_aggregate_function/metadata.json b/parser/testdata/02985_minmax_index_aggregate_function/metadata.json index ef382ce51e..0967ef424b 100644 --- a/parser/testdata/02985_minmax_index_aggregate_function/metadata.json +++ b/parser/testdata/02985_minmax_index_aggregate_function/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt4": true, - "stmt5": true - } -} +{} diff --git a/parser/testdata/02989_join_using_parent_scope/metadata.json b/parser/testdata/02989_join_using_parent_scope/metadata.json index fec152526a..0967ef424b 100644 --- a/parser/testdata/02989_join_using_parent_scope/metadata.json +++ b/parser/testdata/02989_join_using_parent_scope/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt32": true - } -} +{} diff --git a/parser/testdata/02990_parts_splitter_invalid_ranges/metadata.json b/parser/testdata/02990_parts_splitter_invalid_ranges/metadata.json index ef58f80315..0967ef424b 100644 --- a/parser/testdata/02990_parts_splitter_invalid_ranges/metadata.json +++ b/parser/testdata/02990_parts_splitter_invalid_ranges/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/02990_rmt_replica_path_uuid/metadata.json b/parser/testdata/02990_rmt_replica_path_uuid/metadata.json index 64e53a7fed..1295a45747 100644 --- 
a/parser/testdata/02990_rmt_replica_path_uuid/metadata.json +++ b/parser/testdata/02990_rmt_replica_path_uuid/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt3": true, - "stmt8": true + "stmt3": true } } diff --git a/parser/testdata/03000_traverse_shadow_system_data_paths/metadata.json b/parser/testdata/03000_traverse_shadow_system_data_paths/metadata.json index b65b07d7a6..0967ef424b 100644 --- a/parser/testdata/03000_traverse_shadow_system_data_paths/metadata.json +++ b/parser/testdata/03000_traverse_shadow_system_data_paths/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt4": true - } -} +{} diff --git a/parser/testdata/03003_count_asterisk_filter/metadata.json b/parser/testdata/03003_count_asterisk_filter/metadata.json index 6bf8d5b80a..0967ef424b 100644 --- a/parser/testdata/03003_count_asterisk_filter/metadata.json +++ b/parser/testdata/03003_count_asterisk_filter/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt5": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/03006_parallel_replicas_prewhere/metadata.json b/parser/testdata/03006_parallel_replicas_prewhere/metadata.json index 1295a45747..0967ef424b 100644 --- a/parser/testdata/03006_parallel_replicas_prewhere/metadata.json +++ b/parser/testdata/03006_parallel_replicas_prewhere/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git a/parser/testdata/03009_range_dict_get_or_default/metadata.json b/parser/testdata/03009_range_dict_get_or_default/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/03009_range_dict_get_or_default/metadata.json +++ b/parser/testdata/03009_range_dict_get_or_default/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} +{} diff --git a/parser/testdata/03011_definitive_guide_to_cast/metadata.json b/parser/testdata/03011_definitive_guide_to_cast/metadata.json index 744aa90a9f..8f729e219a 100644 --- a/parser/testdata/03011_definitive_guide_to_cast/metadata.json +++ b/parser/testdata/03011_definitive_guide_to_cast/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt36": true, - "stmt53": true + "stmt36": true } } diff --git a/parser/testdata/03013_ignore_drop_queries_probability/metadata.json b/parser/testdata/03013_ignore_drop_queries_probability/metadata.json index 43839d9c81..0967ef424b 100644 --- a/parser/testdata/03013_ignore_drop_queries_probability/metadata.json +++ b/parser/testdata/03013_ignore_drop_queries_probability/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt13": true, - "stmt3": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/03020_order_by_SimpleAggregateFunction/metadata.json b/parser/testdata/03020_order_by_SimpleAggregateFunction/metadata.json index 2c4349cdf8..0967ef424b 100644 --- a/parser/testdata/03020_order_by_SimpleAggregateFunction/metadata.json +++ b/parser/testdata/03020_order_by_SimpleAggregateFunction/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt10": true, - "stmt7": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/03031_low_cardinality_logical_error/metadata.json b/parser/testdata/03031_low_cardinality_logical_error/metadata.json index e9d6e46171..0967ef424b 100644 --- a/parser/testdata/03031_low_cardinality_logical_error/metadata.json +++ b/parser/testdata/03031_low_cardinality_logical_error/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt1": true - } -} +{} diff --git a/parser/testdata/03033_index_definition_sql_udf_bug/metadata.json 
b/parser/testdata/03033_index_definition_sql_udf_bug/metadata.json index b65b07d7a6..0967ef424b 100644 --- a/parser/testdata/03033_index_definition_sql_udf_bug/metadata.json +++ b/parser/testdata/03033_index_definition_sql_udf_bug/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt4": true - } -} +{} diff --git a/parser/testdata/03033_lightweight_deletes_sync/metadata.json b/parser/testdata/03033_lightweight_deletes_sync/metadata.json index 05aa6dfc72..0967ef424b 100644 --- a/parser/testdata/03033_lightweight_deletes_sync/metadata.json +++ b/parser/testdata/03033_lightweight_deletes_sync/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt4": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/03036_dynamic_read_shared_subcolumns_compact_merge_tree/metadata.json b/parser/testdata/03036_dynamic_read_shared_subcolumns_compact_merge_tree/metadata.json index ab9202e88e..0967ef424b 100644 --- a/parser/testdata/03036_dynamic_read_shared_subcolumns_compact_merge_tree/metadata.json +++ b/parser/testdata/03036_dynamic_read_shared_subcolumns_compact_merge_tree/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt11": true - } -} +{} diff --git a/parser/testdata/03036_dynamic_read_shared_subcolumns_memory/metadata.json b/parser/testdata/03036_dynamic_read_shared_subcolumns_memory/metadata.json index ab9202e88e..0967ef424b 100644 --- a/parser/testdata/03036_dynamic_read_shared_subcolumns_memory/metadata.json +++ b/parser/testdata/03036_dynamic_read_shared_subcolumns_memory/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt11": true - } -} +{} diff --git a/parser/testdata/03036_dynamic_read_shared_subcolumns_wide_merge_tree/metadata.json b/parser/testdata/03036_dynamic_read_shared_subcolumns_wide_merge_tree/metadata.json index c45b7602ba..0967ef424b 100644 --- a/parser/testdata/03036_dynamic_read_shared_subcolumns_wide_merge_tree/metadata.json +++ b/parser/testdata/03036_dynamic_read_shared_subcolumns_wide_merge_tree/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt12": true - } -} +{} diff --git a/parser/testdata/03036_dynamic_read_subcolumns_compact_merge_tree/metadata.json b/parser/testdata/03036_dynamic_read_subcolumns_compact_merge_tree/metadata.json index ab9202e88e..0967ef424b 100644 --- a/parser/testdata/03036_dynamic_read_subcolumns_compact_merge_tree/metadata.json +++ b/parser/testdata/03036_dynamic_read_subcolumns_compact_merge_tree/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt11": true - } -} +{} diff --git a/parser/testdata/03036_dynamic_read_subcolumns_memory/metadata.json b/parser/testdata/03036_dynamic_read_subcolumns_memory/metadata.json index ab9202e88e..0967ef424b 100644 --- a/parser/testdata/03036_dynamic_read_subcolumns_memory/metadata.json +++ b/parser/testdata/03036_dynamic_read_subcolumns_memory/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt11": true - } -} +{} diff --git a/parser/testdata/03036_dynamic_read_subcolumns_wide_merge_tree/metadata.json b/parser/testdata/03036_dynamic_read_subcolumns_wide_merge_tree/metadata.json index c45b7602ba..0967ef424b 100644 --- a/parser/testdata/03036_dynamic_read_subcolumns_wide_merge_tree/metadata.json +++ b/parser/testdata/03036_dynamic_read_subcolumns_wide_merge_tree/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt12": true - } -} +{} diff --git a/parser/testdata/03040_dynamic_type_alters_2_compact_merge_tree/metadata.json b/parser/testdata/03040_dynamic_type_alters_2_compact_merge_tree/metadata.json index 7974f6a182..0967ef424b 100644 --- 
a/parser/testdata/03040_dynamic_type_alters_2_compact_merge_tree/metadata.json +++ b/parser/testdata/03040_dynamic_type_alters_2_compact_merge_tree/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt24": true - } -} +{} diff --git a/parser/testdata/03040_dynamic_type_alters_2_wide_merge_tree/metadata.json b/parser/testdata/03040_dynamic_type_alters_2_wide_merge_tree/metadata.json index b09bea8db0..0967ef424b 100644 --- a/parser/testdata/03040_dynamic_type_alters_2_wide_merge_tree/metadata.json +++ b/parser/testdata/03040_dynamic_type_alters_2_wide_merge_tree/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt25": true - } -} +{} diff --git a/parser/testdata/03071_fix_short_circuit_logic/metadata.json b/parser/testdata/03071_fix_short_circuit_logic/metadata.json index d02612666a..3a06a4a1ac 100644 --- a/parser/testdata/03071_fix_short_circuit_logic/metadata.json +++ b/parser/testdata/03071_fix_short_circuit_logic/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt5": true, - "stmt8": true + "stmt5": true } } diff --git a/parser/testdata/03100_lwu_46_deletes_skip_indexes/metadata.json b/parser/testdata/03100_lwu_46_deletes_skip_indexes/metadata.json index ff0eba6904..0967ef424b 100644 --- a/parser/testdata/03100_lwu_46_deletes_skip_indexes/metadata.json +++ b/parser/testdata/03100_lwu_46_deletes_skip_indexes/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt6": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/03149_analyzer_join_projection_name/metadata.json b/parser/testdata/03149_analyzer_join_projection_name/metadata.json index dbdbb76d4f..0967ef424b 100644 --- a/parser/testdata/03149_analyzer_join_projection_name/metadata.json +++ b/parser/testdata/03149_analyzer_join_projection_name/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt6": true - } -} +{} diff --git a/parser/testdata/03149_analyzer_join_projection_name_2/metadata.json b/parser/testdata/03149_analyzer_join_projection_name_2/metadata.json index 7ad5569408..0967ef424b 100644 --- a/parser/testdata/03149_analyzer_join_projection_name_2/metadata.json +++ b/parser/testdata/03149_analyzer_join_projection_name_2/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt9": true - } -} +{} diff --git a/parser/testdata/03164_materialize_skip_index_on_merge/metadata.json b/parser/testdata/03164_materialize_skip_index_on_merge/metadata.json index dc0702ac62..0967ef424b 100644 --- a/parser/testdata/03164_materialize_skip_index_on_merge/metadata.json +++ b/parser/testdata/03164_materialize_skip_index_on_merge/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt30": true - } -} +{} diff --git a/parser/testdata/03169_cache_complex_dict_short_circuit_bug/metadata.json b/parser/testdata/03169_cache_complex_dict_short_circuit_bug/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/03169_cache_complex_dict_short_circuit_bug/metadata.json +++ b/parser/testdata/03169_cache_complex_dict_short_circuit_bug/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} +{} diff --git a/parser/testdata/03173_forbid_qualify/metadata.json b/parser/testdata/03173_forbid_qualify/metadata.json index b563327205..0967ef424b 100644 --- a/parser/testdata/03173_forbid_qualify/metadata.json +++ b/parser/testdata/03173_forbid_qualify/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt7": true - } -} +{} diff --git a/parser/testdata/03174_multiple_authentication_methods_show_create/metadata.json 
b/parser/testdata/03174_multiple_authentication_methods_show_create/metadata.json index aeb01f1428..0967ef424b 100644 --- a/parser/testdata/03174_multiple_authentication_methods_show_create/metadata.json +++ b/parser/testdata/03174_multiple_authentication_methods_show_create/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt1": true, - "stmt4": true - } -} +{} diff --git a/parser/testdata/03174_projection_deduplicate/metadata.json b/parser/testdata/03174_projection_deduplicate/metadata.json index d02612666a..0967ef424b 100644 --- a/parser/testdata/03174_projection_deduplicate/metadata.json +++ b/parser/testdata/03174_projection_deduplicate/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt5": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/03206_projection_merge_special_mergetree/metadata.json b/parser/testdata/03206_projection_merge_special_mergetree/metadata.json index 9dfc64b8e0..0967ef424b 100644 --- a/parser/testdata/03206_projection_merge_special_mergetree/metadata.json +++ b/parser/testdata/03206_projection_merge_special_mergetree/metadata.json @@ -1,9 +1 @@ -{ - "explain_todo": { - "stmt10": true, - "stmt19": true, - "stmt23": true, - "stmt4": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/03206_projection_merge_special_mergetree_ignore/metadata.json b/parser/testdata/03206_projection_merge_special_mergetree_ignore/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/03206_projection_merge_special_mergetree_ignore/metadata.json +++ b/parser/testdata/03206_projection_merge_special_mergetree_ignore/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} +{} diff --git a/parser/testdata/03231_dynamic_variant_in_order_by_group_by/metadata.json b/parser/testdata/03231_dynamic_variant_in_order_by_group_by/metadata.json index 8eb3175658..0967ef424b 100644 --- a/parser/testdata/03231_dynamic_variant_in_order_by_group_by/metadata.json +++ b/parser/testdata/03231_dynamic_variant_in_order_by_group_by/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt17": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/03248_with_insert/metadata.json b/parser/testdata/03248_with_insert/metadata.json index c5d9509926..2c63c08510 100644 --- a/parser/testdata/03248_with_insert/metadata.json +++ b/parser/testdata/03248_with_insert/metadata.json @@ -1,7 +1,2 @@ { - "explain_todo": { - "stmt4": true, - "stmt5": true, - "stmt9": true - } } diff --git a/parser/testdata/03254_timeseries_instant_value_aggregate_functions/metadata.json b/parser/testdata/03254_timeseries_instant_value_aggregate_functions/metadata.json index 7ad5569408..0967ef424b 100644 --- a/parser/testdata/03254_timeseries_instant_value_aggregate_functions/metadata.json +++ b/parser/testdata/03254_timeseries_instant_value_aggregate_functions/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt9": true - } -} +{} diff --git a/parser/testdata/03254_timeseries_to_grid_aggregate_function/metadata.json b/parser/testdata/03254_timeseries_to_grid_aggregate_function/metadata.json index bc141058a4..0967ef424b 100644 --- a/parser/testdata/03254_timeseries_to_grid_aggregate_function/metadata.json +++ b/parser/testdata/03254_timeseries_to_grid_aggregate_function/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt2": true, - "stmt3": true - } -} +{} diff --git a/parser/testdata/03254_timeseries_to_grid_aggregate_function_sparse/metadata.json b/parser/testdata/03254_timeseries_to_grid_aggregate_function_sparse/metadata.json index bc141058a4..0967ef424b 
100644 --- a/parser/testdata/03254_timeseries_to_grid_aggregate_function_sparse/metadata.json +++ b/parser/testdata/03254_timeseries_to_grid_aggregate_function_sparse/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt2": true, - "stmt3": true - } -} +{} diff --git a/parser/testdata/03256_invalid_mutation_query/metadata.json b/parser/testdata/03256_invalid_mutation_query/metadata.json index 7ad5569408..0967ef424b 100644 --- a/parser/testdata/03256_invalid_mutation_query/metadata.json +++ b/parser/testdata/03256_invalid_mutation_query/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt9": true - } -} +{} diff --git a/parser/testdata/03257_reverse_sorting_key/metadata.json b/parser/testdata/03257_reverse_sorting_key/metadata.json index ab70e34390..3a06a4a1ac 100644 --- a/parser/testdata/03257_reverse_sorting_key/metadata.json +++ b/parser/testdata/03257_reverse_sorting_key/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt16": true, "stmt5": true } } diff --git a/parser/testdata/03257_reverse_sorting_key_simple/metadata.json b/parser/testdata/03257_reverse_sorting_key_simple/metadata.json index 548eac68c6..3a06a4a1ac 100644 --- a/parser/testdata/03257_reverse_sorting_key_simple/metadata.json +++ b/parser/testdata/03257_reverse_sorting_key_simple/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt12": true, "stmt5": true } } diff --git a/parser/testdata/03257_reverse_sorting_key_zookeeper/metadata.json b/parser/testdata/03257_reverse_sorting_key_zookeeper/metadata.json index 7b4ddafa53..1295a45747 100644 --- a/parser/testdata/03257_reverse_sorting_key_zookeeper/metadata.json +++ b/parser/testdata/03257_reverse_sorting_key_zookeeper/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt3": true, - "stmt4": true + "stmt3": true } } diff --git a/parser/testdata/03261_minmax_indices_by_default/metadata.json b/parser/testdata/03261_minmax_indices_by_default/metadata.json index 4446ba250f..0967ef424b 100644 --- a/parser/testdata/03261_minmax_indices_by_default/metadata.json +++ b/parser/testdata/03261_minmax_indices_by_default/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt13": true, - "stmt14": true, - "stmt39": true, - "stmt41": true - } -} +{} diff --git a/parser/testdata/03273_select_from_explain_ast_non_select/metadata.json b/parser/testdata/03273_select_from_explain_ast_non_select/metadata.json index 3455a1864a..0967ef424b 100644 --- a/parser/testdata/03273_select_from_explain_ast_non_select/metadata.json +++ b/parser/testdata/03273_select_from_explain_ast_non_select/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt2": true, - "stmt3": true, - "stmt4": true, - "stmt5": true - } -} +{} diff --git a/parser/testdata/03279_join_choose_build_table/metadata.json b/parser/testdata/03279_join_choose_build_table/metadata.json index 0438c9b85f..0967ef424b 100644 --- a/parser/testdata/03279_join_choose_build_table/metadata.json +++ b/parser/testdata/03279_join_choose_build_table/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt15": true - } -} +{} diff --git a/parser/testdata/03286_reverse_sorting_key_final/metadata.json b/parser/testdata/03286_reverse_sorting_key_final/metadata.json index a2b51f514d..0967ef424b 100644 --- a/parser/testdata/03286_reverse_sorting_key_final/metadata.json +++ b/parser/testdata/03286_reverse_sorting_key_final/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt13": true, - "stmt2": true, - "stmt7": true - } -} +{} diff --git a/parser/testdata/03286_reverse_sorting_key_final2/metadata.json 
b/parser/testdata/03286_reverse_sorting_key_final2/metadata.json index f33d7d9f5a..0967ef424b 100644 --- a/parser/testdata/03286_reverse_sorting_key_final2/metadata.json +++ b/parser/testdata/03286_reverse_sorting_key_final2/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt10": true, - "stmt2": true, - "stmt6": true - } -} +{} diff --git a/parser/testdata/03310_index_hints_read_columns/metadata.json b/parser/testdata/03310_index_hints_read_columns/metadata.json index 3f80c78e32..0967ef424b 100644 --- a/parser/testdata/03310_index_hints_read_columns/metadata.json +++ b/parser/testdata/03310_index_hints_read_columns/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt12": true, - "stmt26": true - } -} +{} diff --git a/parser/testdata/03321_inner_materialized_view_nested/metadata.json b/parser/testdata/03321_inner_materialized_view_nested/metadata.json index 3a06a4a1ac..0967ef424b 100644 --- a/parser/testdata/03321_inner_materialized_view_nested/metadata.json +++ b/parser/testdata/03321_inner_materialized_view_nested/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt5": true - } -} +{} diff --git a/parser/testdata/03365_qbit_casts_as_from_array/metadata.json b/parser/testdata/03365_qbit_casts_as_from_array/metadata.json index 9d32d2ea78..0967ef424b 100644 --- a/parser/testdata/03365_qbit_casts_as_from_array/metadata.json +++ b/parser/testdata/03365_qbit_casts_as_from_array/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt20": true, - "stmt21": true, - "stmt22": true, - "stmt23": true - } -} +{} diff --git a/parser/testdata/03369_l2_distance_transposed_variadic/metadata.json b/parser/testdata/03369_l2_distance_transposed_variadic/metadata.json index 90496cea92..0967ef424b 100644 --- a/parser/testdata/03369_l2_distance_transposed_variadic/metadata.json +++ b/parser/testdata/03369_l2_distance_transposed_variadic/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt37": true - } -} +{} diff --git a/parser/testdata/03374_qbit_nullable/metadata.json b/parser/testdata/03374_qbit_nullable/metadata.json index cc2f3624ef..0967ef424b 100644 --- a/parser/testdata/03374_qbit_nullable/metadata.json +++ b/parser/testdata/03374_qbit_nullable/metadata.json @@ -1 +1 @@ -{"explain_todo":{"stmt4":true}} +{} diff --git a/parser/testdata/03381_remote_constants/metadata.json b/parser/testdata/03381_remote_constants/metadata.json index 32d6abfac9..0967ef424b 100644 --- a/parser/testdata/03381_remote_constants/metadata.json +++ b/parser/testdata/03381_remote_constants/metadata.json @@ -1,8 +1 @@ -{ - "explain_todo": { - "stmt14": true, - "stmt15": true, - "stmt16": true, - "stmt17": true - } -} +{} diff --git a/parser/testdata/03404_bfloat16_insert_values/metadata.json b/parser/testdata/03404_bfloat16_insert_values/metadata.json index 24c397911d..0967ef424b 100644 --- a/parser/testdata/03404_bfloat16_insert_values/metadata.json +++ b/parser/testdata/03404_bfloat16_insert_values/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt14": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/03442_lightweight_deletes_on_fly/metadata.json b/parser/testdata/03442_lightweight_deletes_on_fly/metadata.json index d316b0c5fc..0967ef424b 100644 --- a/parser/testdata/03442_lightweight_deletes_on_fly/metadata.json +++ b/parser/testdata/03442_lightweight_deletes_on_fly/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt13": true, - "stmt16": true, - "stmt8": true - } -} +{} diff --git a/parser/testdata/03444_flip_coordinates/metadata.json 
b/parser/testdata/03444_flip_coordinates/metadata.json index b563327205..0967ef424b 100644 --- a/parser/testdata/03444_flip_coordinates/metadata.json +++ b/parser/testdata/03444_flip_coordinates/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt7": true - } -} +{} diff --git a/parser/testdata/03533_skip_index_on_data_reading/metadata.json b/parser/testdata/03533_skip_index_on_data_reading/metadata.json index ca7b266d0b..0967ef424b 100644 --- a/parser/testdata/03533_skip_index_on_data_reading/metadata.json +++ b/parser/testdata/03533_skip_index_on_data_reading/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt24": true, - "stmt26": true - } -} +{} diff --git a/parser/testdata/03538_array_except/metadata.json b/parser/testdata/03538_array_except/metadata.json index dc0702ac62..0967ef424b 100644 --- a/parser/testdata/03538_array_except/metadata.json +++ b/parser/testdata/03538_array_except/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt30": true - } -} +{} diff --git a/parser/testdata/03541_table_without_insertable_columns/metadata.json b/parser/testdata/03541_table_without_insertable_columns/metadata.json index b65b07d7a6..0967ef424b 100644 --- a/parser/testdata/03541_table_without_insertable_columns/metadata.json +++ b/parser/testdata/03541_table_without_insertable_columns/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt4": true - } -} +{} diff --git a/parser/testdata/03549_keeper_map_column_comments/metadata.json b/parser/testdata/03549_keeper_map_column_comments/metadata.json index bc141058a4..0967ef424b 100644 --- a/parser/testdata/03549_keeper_map_column_comments/metadata.json +++ b/parser/testdata/03549_keeper_map_column_comments/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt2": true, - "stmt3": true - } -} +{} diff --git a/parser/testdata/03550_analyzer_remote_view_columns/metadata.json b/parser/testdata/03550_analyzer_remote_view_columns/metadata.json index b563327205..0967ef424b 100644 --- a/parser/testdata/03550_analyzer_remote_view_columns/metadata.json +++ b/parser/testdata/03550_analyzer_remote_view_columns/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt7": true - } -} +{} diff --git a/parser/testdata/03565_union_all_nullptr/metadata.json b/parser/testdata/03565_union_all_nullptr/metadata.json index ef58f80315..0967ef424b 100644 --- a/parser/testdata/03565_union_all_nullptr/metadata.json +++ b/parser/testdata/03565_union_all_nullptr/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/03580_join_runtime_filter/metadata.json b/parser/testdata/03580_join_runtime_filter/metadata.json index 5cf734c28d..0967ef424b 100644 --- a/parser/testdata/03580_join_runtime_filter/metadata.json +++ b/parser/testdata/03580_join_runtime_filter/metadata.json @@ -1,9 +1 @@ -{ - "explain_todo": { - "stmt27": true, - "stmt28": true, - "stmt30": true, - "stmt32": true, - "stmt34": true - } -} +{} diff --git a/parser/testdata/03580_join_runtime_filter_prewhere/metadata.json b/parser/testdata/03580_join_runtime_filter_prewhere/metadata.json index 15223e732d..0967ef424b 100644 --- a/parser/testdata/03580_join_runtime_filter_prewhere/metadata.json +++ b/parser/testdata/03580_join_runtime_filter_prewhere/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt11": true, - "stmt9": true - } -} +{} diff --git a/parser/testdata/03611_uniqExact_bug/metadata.json b/parser/testdata/03611_uniqExact_bug/metadata.json index 0f7f289811..ef58f80315 100644 --- 
a/parser/testdata/03611_uniqExact_bug/metadata.json +++ b/parser/testdata/03611_uniqExact_bug/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt10": true, "stmt2": true } } diff --git a/parser/testdata/03654_grouping_sets_any_min_max/metadata.json b/parser/testdata/03654_grouping_sets_any_min_max/metadata.json index 87628c6e85..0967ef424b 100644 --- a/parser/testdata/03654_grouping_sets_any_min_max/metadata.json +++ b/parser/testdata/03654_grouping_sets_any_min_max/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt12": true, - "stmt14": true, - "stmt2": true - } -} +{} diff --git a/parser/testdata/03667_accurate_cast_datetime_overflow/metadata.json b/parser/testdata/03667_accurate_cast_datetime_overflow/metadata.json index bc141058a4..ef58f80315 100644 --- a/parser/testdata/03667_accurate_cast_datetime_overflow/metadata.json +++ b/parser/testdata/03667_accurate_cast_datetime_overflow/metadata.json @@ -1,6 +1,5 @@ { "explain_todo": { - "stmt2": true, - "stmt3": true + "stmt2": true } } diff --git a/parser/testdata/03668_shard_join_in_reverse_order/metadata.json b/parser/testdata/03668_shard_join_in_reverse_order/metadata.json index ef58f80315..0967ef424b 100644 --- a/parser/testdata/03668_shard_join_in_reverse_order/metadata.json +++ b/parser/testdata/03668_shard_join_in_reverse_order/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/03669_min_max_projection_with_reverse_order_key/metadata.json b/parser/testdata/03669_min_max_projection_with_reverse_order_key/metadata.json index ef58f80315..0967ef424b 100644 --- a/parser/testdata/03669_min_max_projection_with_reverse_order_key/metadata.json +++ b/parser/testdata/03669_min_max_projection_with_reverse_order_key/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt2": true - } -} +{} diff --git a/parser/testdata/03701_column_ttl_fully_expired/metadata.json b/parser/testdata/03701_column_ttl_fully_expired/metadata.json index 7974f6a182..0967ef424b 100644 --- a/parser/testdata/03701_column_ttl_fully_expired/metadata.json +++ b/parser/testdata/03701_column_ttl_fully_expired/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt24": true - } -} +{} diff --git a/parser/testdata/03702_function_dict_get_keys_basic/metadata.json b/parser/testdata/03702_function_dict_get_keys_basic/metadata.json index 3caae9f728..0967ef424b 100644 --- a/parser/testdata/03702_function_dict_get_keys_basic/metadata.json +++ b/parser/testdata/03702_function_dict_get_keys_basic/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt28": true, - "stmt44": true - } -} +{} diff --git a/parser/testdata/03702_optimize_inverse_dictionary_lookup_composite_and_layouts/metadata.json b/parser/testdata/03702_optimize_inverse_dictionary_lookup_composite_and_layouts/metadata.json index bd3a8a9f25..0967ef424b 100644 --- a/parser/testdata/03702_optimize_inverse_dictionary_lookup_composite_and_layouts/metadata.json +++ b/parser/testdata/03702_optimize_inverse_dictionary_lookup_composite_and_layouts/metadata.json @@ -1,7 +1 @@ -{ - "explain_todo": { - "stmt18": true, - "stmt19": true, - "stmt20": true - } -} +{} diff --git a/parser/testdata/03705_count_if_asterisk/metadata.json b/parser/testdata/03705_count_if_asterisk/metadata.json index 1295a45747..0967ef424b 100644 --- a/parser/testdata/03705_count_if_asterisk/metadata.json +++ b/parser/testdata/03705_count_if_asterisk/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt3": true - } -} +{} diff --git 
a/parser/testdata/03705_function_dict_get_keys_multiple_dict_and_no_caching/metadata.json b/parser/testdata/03705_function_dict_get_keys_multiple_dict_and_no_caching/metadata.json index ab9202e88e..0967ef424b 100644 --- a/parser/testdata/03705_function_dict_get_keys_multiple_dict_and_no_caching/metadata.json +++ b/parser/testdata/03705_function_dict_get_keys_multiple_dict_and_no_caching/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt11": true - } -} +{} diff --git a/parser/testdata/03708_analyzer_convert_any_outer_to_inner_2/metadata.json b/parser/testdata/03708_analyzer_convert_any_outer_to_inner_2/metadata.json index c45b7602ba..0967ef424b 100644 --- a/parser/testdata/03708_analyzer_convert_any_outer_to_inner_2/metadata.json +++ b/parser/testdata/03708_analyzer_convert_any_outer_to_inner_2/metadata.json @@ -1,5 +1 @@ -{ - "explain_todo": { - "stmt12": true - } -} +{} diff --git a/parser/testdata/03709_anti_join_runtime_filters/metadata.json b/parser/testdata/03709_anti_join_runtime_filters/metadata.json index 4ec8630990..0967ef424b 100644 --- a/parser/testdata/03709_anti_join_runtime_filters/metadata.json +++ b/parser/testdata/03709_anti_join_runtime_filters/metadata.json @@ -1,6 +1 @@ -{ - "explain_todo": { - "stmt12": true, - "stmt9": true - } -} +{}