diff --git a/pkg/sql/parser/ddl.go b/pkg/sql/parser/ddl.go index 879070ed..b66d9ff3 100644 --- a/pkg/sql/parser/ddl.go +++ b/pkg/sql/parser/ddl.go @@ -95,6 +95,60 @@ func (p *Parser) parseCreateStatement() (ast.Statement, error) { } return stmt, nil } + + // Snowflake object-type extensions: STAGE, STREAM, TASK, PIPE, FILE FORMAT, + // WAREHOUSE, DATABASE, SCHEMA, ROLE, FUNCTION, PROCEDURE, SEQUENCE. + // Parse-only: record the object kind and name on a DescribeStatement + // placeholder, then consume the rest of the statement body permissively + // until ';' or EOF (tracking balanced parens so a ';' nested inside a + // parenthesized clause does not end the statement early; semicolons inside + // string literals are already opaque to the token stream). + if p.dialect == string(keywords.DialectSnowflake) { + kind := strings.ToUpper(p.currentToken.Token.Value) + if kind == "FILE" && strings.EqualFold(p.peekToken().Token.Value, "FORMAT") { + p.advance() // FILE + kind = "FILE FORMAT" + } + switch kind { + case "STAGE", "STREAM", "TASK", "PIPE", "FILE FORMAT", + "WAREHOUSE", "DATABASE", "SCHEMA", "ROLE", "SEQUENCE", + "FUNCTION", "PROCEDURE": + p.advance() // Consume object-kind keyword + // Optional IF NOT EXISTS + if p.isType(models.TokenTypeIf) { + p.advance() + if p.isType(models.TokenTypeNot) { + p.advance() + } + if p.isType(models.TokenTypeExists) { + p.advance() + } + } + // Object name (qualified identifier) + name, _ := p.parseQualifiedName() + // Consume the rest of the statement body until ';' or EOF, + // tracking balanced parens.
+ depth := 0 + for { + t := p.currentToken.Token.Type + if t == models.TokenTypeEOF { + break + } + if t == models.TokenTypeSemicolon && depth == 0 { + break + } + if t == models.TokenTypeLParen { + depth++ + } else if t == models.TokenTypeRParen { + depth-- + } + p.advance() + } + stub := ast.GetDescribeStatement() + stub.TableName = "CREATE " + kind + " " + name + return stub, nil + } + } + return nil, p.expectedError("TABLE, VIEW, MATERIALIZED VIEW, or INDEX after CREATE") } diff --git a/pkg/sql/parser/snowflake_create_objects_test.go b/pkg/sql/parser/snowflake_create_objects_test.go new file mode 100644 index 00000000..3ea5e874 --- /dev/null +++ b/pkg/sql/parser/snowflake_create_objects_test.go @@ -0,0 +1,47 @@ +// Copyright 2026 GoSQLX Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); + +package parser_test + +import ( + "testing" + + "github.com/ajitpratap0/GoSQLX/pkg/gosqlx" + "github.com/ajitpratap0/GoSQLX/pkg/sql/keywords" +) + +// TestSnowflakeCreateObjects verifies that Snowflake CREATE statements for +// object types beyond TABLE/VIEW/INDEX parse successfully. These are currently +// consumed as stub statements (body is not modeled on the AST). Regression for #483.
+func TestSnowflakeCreateObjects(t *testing.T) { + queries := map[string]string{ + "create_stage": `CREATE STAGE my_stage URL='s3://bucket/path' CREDENTIALS=(AWS_KEY_ID='abc' AWS_SECRET_KEY='xyz')`, + + "create_file_format": `CREATE FILE FORMAT my_csv TYPE = CSV FIELD_DELIMITER = ','`, + + "create_stream": `CREATE STREAM my_stream ON TABLE events`, + + "create_task": `CREATE TASK daily_refresh WAREHOUSE = compute_wh SCHEDULE = 'USING CRON 0 0 * * * UTC' AS INSERT INTO t SELECT 1`, + + "create_or_replace_pipe": `CREATE OR REPLACE PIPE my_pipe AUTO_INGEST = TRUE AS COPY INTO t FROM @my_stage`, + + "create_warehouse": `CREATE WAREHOUSE my_wh WITH WAREHOUSE_SIZE = 'SMALL'`, + + "create_database": `CREATE DATABASE my_db`, + + "create_schema_qualified": `CREATE SCHEMA analytics.my_schema`, + + "create_role": `CREATE ROLE analyst`, + + "create_if_not_exists_stage": `CREATE STAGE IF NOT EXISTS my_stage URL='s3://bucket'`, + } + for name, q := range queries { + q := q + t.Run(name, func(t *testing.T) { + if _, err := gosqlx.ParseWithDialect(q, keywords.DialectSnowflake); err != nil { + t.Fatalf("parse failed: %v", err) + } + }) + } +}