diff --git a/ast/alter_database_set_statement.go b/ast/alter_database_set_statement.go index b2c3bfec..1a4cc294 100644 --- a/ast/alter_database_set_statement.go +++ b/ast/alter_database_set_statement.go @@ -64,6 +64,27 @@ func (o *OnOffDatabaseOption) createDatabaseOption() {} func (i *IdentifierDatabaseOption) createDatabaseOption() {} func (d *DelayedDurabilityDatabaseOption) createDatabaseOption() {} +// MaxSizeDatabaseOption represents a MAXSIZE option. +type MaxSizeDatabaseOption struct { + OptionKind string `json:"OptionKind,omitempty"` + MaxSize ScalarExpression `json:"MaxSize,omitempty"` + Units string `json:"Units,omitempty"` // "GB", "TB", etc. +} + +func (m *MaxSizeDatabaseOption) node() {} +func (m *MaxSizeDatabaseOption) databaseOption() {} +func (m *MaxSizeDatabaseOption) createDatabaseOption() {} + +// LiteralDatabaseOption represents a database option with a literal value (e.g., EDITION). +type LiteralDatabaseOption struct { + OptionKind string `json:"OptionKind,omitempty"` + Value ScalarExpression `json:"Value,omitempty"` +} + +func (l *LiteralDatabaseOption) node() {} +func (l *LiteralDatabaseOption) databaseOption() {} +func (l *LiteralDatabaseOption) createDatabaseOption() {} + // AlterDatabaseAddFileStatement represents ALTER DATABASE ... 
ADD FILE statement type AlterDatabaseAddFileStatement struct { DatabaseName *Identifier diff --git a/ast/alter_function_statement.go b/ast/alter_function_statement.go index 18949b28..4cb0a1d4 100644 --- a/ast/alter_function_statement.go +++ b/ast/alter_function_statement.go @@ -43,9 +43,9 @@ type TableValuedFunctionReturnType struct { func (r *TableValuedFunctionReturnType) functionReturnTypeNode() {} -// SelectFunctionReturnType represents a SELECT function return type +// SelectFunctionReturnType represents a SELECT function return type (inline table-valued function) type SelectFunctionReturnType struct { - // Simplified - will be expanded later + SelectStatement *SelectStatement } func (r *SelectFunctionReturnType) functionReturnTypeNode() {} diff --git a/ast/alter_simple_statements.go b/ast/alter_simple_statements.go index 1c76508c..e49e4756 100644 --- a/ast/alter_simple_statements.go +++ b/ast/alter_simple_statements.go @@ -57,7 +57,11 @@ func (s *AlterApplicationRoleStatement) statement() {} // AlterAsymmetricKeyStatement represents an ALTER ASYMMETRIC KEY statement. type AlterAsymmetricKeyStatement struct { - Name *Identifier `json:"Name,omitempty"` + Name *Identifier `json:"Name,omitempty"` + Kind string `json:"Kind,omitempty"` + AttestedBy ScalarExpression `json:"AttestedBy,omitempty"` + EncryptionPassword ScalarExpression `json:"EncryptionPassword,omitempty"` + DecryptionPassword ScalarExpression `json:"DecryptionPassword,omitempty"` } func (s *AlterAsymmetricKeyStatement) node() {} @@ -92,6 +96,19 @@ type AlterPartitionFunctionStatement struct { func (s *AlterPartitionFunctionStatement) node() {} func (s *AlterPartitionFunctionStatement) statement() {} +// CreateFullTextCatalogStatement represents a CREATE FULLTEXT CATALOG statement. 
+type CreateFullTextCatalogStatement struct { + Name *Identifier `json:"Name,omitempty"` + FileGroup *Identifier `json:"FileGroup,omitempty"` + Path ScalarExpression `json:"Path,omitempty"` + Owner *Identifier `json:"Owner,omitempty"` + Options []*OnOffFullTextCatalogOption `json:"Options,omitempty"` + IsDefault bool `json:"IsDefault"` +} + +func (s *CreateFullTextCatalogStatement) node() {} +func (s *CreateFullTextCatalogStatement) statement() {} + // AlterFulltextCatalogStatement represents an ALTER FULLTEXT CATALOG statement. type AlterFulltextCatalogStatement struct { Name *Identifier `json:"Name,omitempty"` diff --git a/ast/alter_table_alter_index_statement.go b/ast/alter_table_alter_index_statement.go index 042d7b0d..fdcdd1d0 100644 --- a/ast/alter_table_alter_index_statement.go +++ b/ast/alter_table_alter_index_statement.go @@ -34,3 +34,22 @@ type IndexExpressionOption struct { func (i *IndexExpressionOption) indexOption() {} func (i *IndexExpressionOption) node() {} + +// CompressionDelayIndexOption represents a COMPRESSION_DELAY option +type CompressionDelayIndexOption struct { + Expression ScalarExpression + TimeUnit string // "Unitless", "Minute", "Minutes" + OptionKind string // "CompressionDelay" +} + +func (c *CompressionDelayIndexOption) indexOption() {} +func (c *CompressionDelayIndexOption) node() {} + +// OrderIndexOption represents an ORDER option for clustered columnstore indexes +type OrderIndexOption struct { + Columns []*ColumnReferenceExpression + OptionKind string // "Order" +} + +func (o *OrderIndexOption) indexOption() {} +func (o *OrderIndexOption) node() {} diff --git a/ast/alter_trigger_statement.go b/ast/alter_trigger_statement.go index 6842c128..89fc0297 100644 --- a/ast/alter_trigger_statement.go +++ b/ast/alter_trigger_statement.go @@ -6,7 +6,7 @@ type AlterTriggerStatement struct { TriggerObject *TriggerObject TriggerType string // "For", "After", "InsteadOf" TriggerActions []*TriggerAction - Options []*TriggerOption + Options 
[]TriggerOptionType WithAppend bool IsNotForReplication bool MethodSpecifier *MethodSpecifier @@ -28,12 +28,33 @@ type TriggerAction struct { EventTypeGroup *EventTypeContainer // For database/server events } +// TriggerOptionType is the interface for trigger options +type TriggerOptionType interface { + triggerOption() +} + // TriggerOption represents a trigger option type TriggerOption struct { OptionKind string OptionState string } +func (o *TriggerOption) triggerOption() {} + +// ExecuteAsClause represents an EXECUTE AS clause +type ExecuteAsClause struct { + ExecuteAsOption string // Caller, Self, Owner, or specific user + Principal ScalarExpression +} + +// ExecuteAsTriggerOption represents an EXECUTE AS trigger option +type ExecuteAsTriggerOption struct { + OptionKind string // "ExecuteAsClause" + ExecuteAsClause *ExecuteAsClause +} + +func (o *ExecuteAsTriggerOption) triggerOption() {} + // MethodSpecifier represents a CLR method specifier type MethodSpecifier struct { AssemblyName *Identifier diff --git a/ast/backup_statement.go b/ast/backup_statement.go index da5c1a0f..1e81352e 100644 --- a/ast/backup_statement.go +++ b/ast/backup_statement.go @@ -11,6 +11,17 @@ func (s *BackupDatabaseStatement) statementNode() {} func (s *BackupDatabaseStatement) statement() {} func (s *BackupDatabaseStatement) node() {} +// BackupTransactionLogStatement represents a BACKUP LOG statement +type BackupTransactionLogStatement struct { + DatabaseName *IdentifierOrValueExpression + Devices []*DeviceInfo + Options []*BackupOption +} + +func (s *BackupTransactionLogStatement) statementNode() {} +func (s *BackupTransactionLogStatement) statement() {} +func (s *BackupTransactionLogStatement) node() {} + // BackupOption represents a backup option type BackupOption struct { OptionKind string // Compression, NoCompression, StopOnError, ContinueAfterError, etc. 
@@ -29,3 +40,33 @@ type BackupCertificateStatement struct { func (s *BackupCertificateStatement) statement() {} func (s *BackupCertificateStatement) node() {} + +// BackupServiceMasterKeyStatement represents a BACKUP SERVICE MASTER KEY statement +type BackupServiceMasterKeyStatement struct { + File ScalarExpression + Password ScalarExpression +} + +func (s *BackupServiceMasterKeyStatement) statement() {} +func (s *BackupServiceMasterKeyStatement) node() {} + +// RestoreServiceMasterKeyStatement represents a RESTORE SERVICE MASTER KEY statement +type RestoreServiceMasterKeyStatement struct { + File ScalarExpression + Password ScalarExpression + IsForce bool +} + +func (s *RestoreServiceMasterKeyStatement) statement() {} +func (s *RestoreServiceMasterKeyStatement) node() {} + +// RestoreMasterKeyStatement represents a RESTORE MASTER KEY statement +type RestoreMasterKeyStatement struct { + File ScalarExpression + Password ScalarExpression + EncryptionPassword ScalarExpression + IsForce bool +} + +func (s *RestoreMasterKeyStatement) statement() {} +func (s *RestoreMasterKeyStatement) node() {} diff --git a/ast/begin_end_block_statement.go b/ast/begin_end_block_statement.go index 75707590..cf5f43c6 100644 --- a/ast/begin_end_block_statement.go +++ b/ast/begin_end_block_statement.go @@ -8,6 +8,36 @@ type BeginEndBlockStatement struct { func (b *BeginEndBlockStatement) node() {} func (b *BeginEndBlockStatement) statement() {} +// BeginEndAtomicBlockStatement represents a BEGIN ATOMIC...END block (for Hekaton/In-Memory OLTP). +type BeginEndAtomicBlockStatement struct { + Options []AtomicBlockOption + StatementList *StatementList +} + +func (b *BeginEndAtomicBlockStatement) node() {} +func (b *BeginEndAtomicBlockStatement) statement() {} + +// AtomicBlockOption is an interface for atomic block options. +type AtomicBlockOption interface { + atomicBlockOption() +} + +// IdentifierAtomicBlockOption represents an atomic block option with an identifier value. 
+type IdentifierAtomicBlockOption struct { + OptionKind string + Value *Identifier +} + +func (o *IdentifierAtomicBlockOption) atomicBlockOption() {} + +// LiteralAtomicBlockOption represents an atomic block option with a literal value. +type LiteralAtomicBlockOption struct { + OptionKind string + Value ScalarExpression +} + +func (o *LiteralAtomicBlockOption) atomicBlockOption() {} + // StatementList is a list of statements. type StatementList struct { Statements []Statement `json:"Statements,omitempty"` diff --git a/ast/create_columnstore_index_statement.go b/ast/create_columnstore_index_statement.go index 8fd671cf..e752d0fc 100644 --- a/ast/create_columnstore_index_statement.go +++ b/ast/create_columnstore_index_statement.go @@ -9,7 +9,7 @@ type CreateColumnStoreIndexStatement struct { Columns []*ColumnReferenceExpression OrderedColumns []*ColumnReferenceExpression IndexOptions []IndexOption - FilterClause ScalarExpression + FilterClause BooleanExpression OnPartition *PartitionSpecifier } diff --git a/ast/create_simple_statements.go b/ast/create_simple_statements.go index 056b59c0..341c4214 100644 --- a/ast/create_simple_statements.go +++ b/ast/create_simple_statements.go @@ -5,6 +5,7 @@ type CreateDatabaseStatement struct { DatabaseName *Identifier `json:"DatabaseName,omitempty"` Options []CreateDatabaseOption `json:"Options,omitempty"` AttachMode string `json:"AttachMode,omitempty"` // "None", "Attach", "AttachRebuildLog" + CopyOf *MultiPartIdentifier `json:"CopyOf,omitempty"` // For AS COPY OF syntax } func (s *CreateDatabaseStatement) node() {} @@ -92,12 +93,63 @@ func (s *CreateCertificateStatement) statement() {} // CreateAsymmetricKeyStatement represents a CREATE ASYMMETRIC KEY statement. 
type CreateAsymmetricKeyStatement struct { - Name *Identifier `json:"Name,omitempty"` + Name *Identifier `json:"Name,omitempty"` + KeySource EncryptionSource `json:"KeySource,omitempty"` + EncryptionAlgorithm string `json:"EncryptionAlgorithm,omitempty"` + Password ScalarExpression `json:"Password,omitempty"` } func (s *CreateAsymmetricKeyStatement) node() {} func (s *CreateAsymmetricKeyStatement) statement() {} +// EncryptionSource is an interface for key sources. +type EncryptionSource interface { + Node + encryptionSource() +} + +// ProviderEncryptionSource represents a key source from a provider. +type ProviderEncryptionSource struct { + Name *Identifier `json:"Name,omitempty"` + KeyOptions []KeyOption `json:"KeyOptions,omitempty"` +} + +func (p *ProviderEncryptionSource) node() {} +func (p *ProviderEncryptionSource) encryptionSource() {} + +// KeyOption is an interface for key options. +type KeyOption interface { + Node + keyOption() +} + +// AlgorithmKeyOption represents an ALGORITHM key option. +type AlgorithmKeyOption struct { + Algorithm string `json:"Algorithm,omitempty"` + OptionKind string `json:"OptionKind,omitempty"` +} + +func (a *AlgorithmKeyOption) node() {} +func (a *AlgorithmKeyOption) keyOption() {} + +// ProviderKeyNameKeyOption represents a PROVIDER_KEY_NAME key option. +type ProviderKeyNameKeyOption struct { + KeyName ScalarExpression `json:"KeyName,omitempty"` + OptionKind string `json:"OptionKind,omitempty"` +} + +func (p *ProviderKeyNameKeyOption) node() {} +func (p *ProviderKeyNameKeyOption) keyOption() {} + +// CreationDispositionKeyOption represents a CREATION_DISPOSITION key option. +type CreationDispositionKeyOption struct { + IsCreateNew bool `json:"IsCreateNew,omitempty"` + OptionKind string `json:"OptionKind,omitempty"` +} + +func (c *CreationDispositionKeyOption) node() {} +func (c *CreationDispositionKeyOption) keyOption() {} + // CreateSymmetricKeyStatement represents a CREATE SYMMETRIC KEY statement. 
type CreateSymmetricKeyStatement struct { Name *Identifier `json:"Name,omitempty"` @@ -206,6 +258,34 @@ type CreateTypeStatement struct { func (s *CreateTypeStatement) node() {} func (s *CreateTypeStatement) statement() {} +// CreateTypeUddtStatement represents a CREATE TYPE ... FROM statement (user-defined data type). +type CreateTypeUddtStatement struct { + Name *SchemaObjectName + DataType DataTypeReference + NullableConstraint *NullableConstraintDefinition +} + +func (s *CreateTypeUddtStatement) node() {} +func (s *CreateTypeUddtStatement) statement() {} + +// CreateTypeUdtStatement represents a CREATE TYPE ... EXTERNAL NAME statement (CLR user-defined type). +type CreateTypeUdtStatement struct { + Name *SchemaObjectName + AssemblyName *AssemblyName +} + +func (s *CreateTypeUdtStatement) node() {} +func (s *CreateTypeUdtStatement) statement() {} + +// CreateTypeTableStatement represents a CREATE TYPE ... AS TABLE statement (table type). +type CreateTypeTableStatement struct { + Name *SchemaObjectName `json:"Name,omitempty"` + Definition *TableDefinition `json:"Definition,omitempty"` +} + +func (s *CreateTypeTableStatement) node() {} +func (s *CreateTypeTableStatement) statement() {} + // CreateXmlIndexStatement represents a CREATE XML INDEX statement. 
type CreateXmlIndexStatement struct { Name *Identifier `json:"Name,omitempty"` diff --git a/ast/create_table_statement.go b/ast/create_table_statement.go index 596c154f..387a5fca 100644 --- a/ast/create_table_statement.go +++ b/ast/create_table_statement.go @@ -25,17 +25,18 @@ func (t *TableDefinition) node() {} // ColumnDefinition represents a column definition in CREATE TABLE type ColumnDefinition struct { - ColumnIdentifier *Identifier - DataType DataTypeReference - Collation *Identifier - DefaultConstraint *DefaultConstraintDefinition - IdentityOptions *IdentityOptions - Constraints []ConstraintDefinition - IsPersisted bool - IsRowGuidCol bool - IsHidden bool - IsMasked bool - Nullable *NullableConstraintDefinition + ColumnIdentifier *Identifier + DataType DataTypeReference + ComputedColumnExpression ScalarExpression + Collation *Identifier + DefaultConstraint *DefaultConstraintDefinition + IdentityOptions *IdentityOptions + Constraints []ConstraintDefinition + IsPersisted bool + IsRowGuidCol bool + IsHidden bool + IsMasked bool + Nullable *NullableConstraintDefinition } func (c *ColumnDefinition) node() {} diff --git a/ast/create_trigger_statement.go b/ast/create_trigger_statement.go index 115ba765..00543360 100644 --- a/ast/create_trigger_statement.go +++ b/ast/create_trigger_statement.go @@ -6,7 +6,7 @@ type CreateTriggerStatement struct { TriggerObject *TriggerObject TriggerType string // "For", "After", "InsteadOf" TriggerActions []*TriggerAction - Options []*TriggerOption + Options []TriggerOptionType WithAppend bool IsNotForReplication bool MethodSpecifier *MethodSpecifier diff --git a/ast/drop_statements.go b/ast/drop_statements.go index 7c9781c5..3cfa1753 100644 --- a/ast/drop_statements.go +++ b/ast/drop_statements.go @@ -232,3 +232,13 @@ type DropAssemblyStatement struct { func (s *DropAssemblyStatement) statement() {} func (s *DropAssemblyStatement) node() {} + +// DropAsymmetricKeyStatement represents a DROP ASYMMETRIC KEY statement +type 
DropAsymmetricKeyStatement struct { + IsIfExists bool `json:"IsIfExists"` + Name *Identifier `json:"Name,omitempty"` + RemoveProviderKey bool `json:"RemoveProviderKey"` +} + +func (s *DropAsymmetricKeyStatement) statement() {} +func (s *DropAsymmetricKeyStatement) node() {} diff --git a/ast/external_statements.go b/ast/external_statements.go index e6c5f983..b73b3ddc 100644 --- a/ast/external_statements.go +++ b/ast/external_statements.go @@ -84,13 +84,21 @@ type ExternalLanguageOption struct { // CreateExternalLibraryStatement represents CREATE EXTERNAL LIBRARY statement type CreateExternalLibraryStatement struct { - Name *Identifier - Options []*ExternalLibraryOption + Name *Identifier + Owner *Identifier + Language ScalarExpression + ExternalLibraryFiles []*ExternalLibraryFileOption } func (s *CreateExternalLibraryStatement) node() {} func (s *CreateExternalLibraryStatement) statement() {} +// ExternalLibraryFileOption represents a file option for external library +type ExternalLibraryFileOption struct { + Content ScalarExpression + Platform *Identifier +} + // ExternalLibraryOption represents an option for external library type ExternalLibraryOption struct { OptionKind string @@ -117,8 +125,11 @@ func (s *AlterExternalLanguageStatement) statement() {} // AlterExternalLibraryStatement represents ALTER EXTERNAL LIBRARY statement type AlterExternalLibraryStatement struct { - Name *Identifier - Options []*ExternalLibraryOption + Name *Identifier + Owner *Identifier + Language *StringLiteral + ExternalLibraryFiles []*ExternalLibraryFileOption + Options []*ExternalLibraryOption } func (s *AlterExternalLibraryStatement) node() {} diff --git a/ast/function_call.go b/ast/function_call.go index 9a6139c9..000f78bc 100644 --- a/ast/function_call.go +++ b/ast/function_call.go @@ -43,3 +43,45 @@ type FunctionCall struct { func (*FunctionCall) node() {} func (*FunctionCall) scalarExpression() {} + +// CastCall represents a CAST expression: CAST(expression AS data_type) +type 
CastCall struct { + DataType DataTypeReference `json:"DataType,omitempty"` + Parameter ScalarExpression `json:"Parameter,omitempty"` + Collation *Identifier `json:"Collation,omitempty"` +} + +func (*CastCall) node() {} +func (*CastCall) scalarExpression() {} + +// ConvertCall represents a CONVERT expression: CONVERT(data_type, expression [, style]) +type ConvertCall struct { + DataType DataTypeReference `json:"DataType,omitempty"` + Parameter ScalarExpression `json:"Parameter,omitempty"` + Style ScalarExpression `json:"Style,omitempty"` + Collation *Identifier `json:"Collation,omitempty"` +} + +func (*ConvertCall) node() {} +func (*ConvertCall) scalarExpression() {} + +// TryCastCall represents a TRY_CAST expression +type TryCastCall struct { + DataType DataTypeReference `json:"DataType,omitempty"` + Parameter ScalarExpression `json:"Parameter,omitempty"` + Collation *Identifier `json:"Collation,omitempty"` +} + +func (*TryCastCall) node() {} +func (*TryCastCall) scalarExpression() {} + +// TryConvertCall represents a TRY_CONVERT expression +type TryConvertCall struct { + DataType DataTypeReference `json:"DataType,omitempty"` + Parameter ScalarExpression `json:"Parameter,omitempty"` + Style ScalarExpression `json:"Style,omitempty"` + Collation *Identifier `json:"Collation,omitempty"` +} + +func (*TryConvertCall) node() {} +func (*TryConvertCall) scalarExpression() {} diff --git a/ast/insert_statement.go b/ast/insert_statement.go index fd74f8c9..222e16fb 100644 --- a/ast/insert_statement.go +++ b/ast/insert_statement.go @@ -11,10 +11,25 @@ func (i *InsertStatement) statement() {} // InsertSpecification contains the details of an INSERT. 
type InsertSpecification struct { - InsertOption string `json:"InsertOption,omitempty"` - InsertSource InsertSource `json:"InsertSource,omitempty"` - Target TableReference `json:"Target,omitempty"` - Columns []*ColumnReferenceExpression `json:"Columns,omitempty"` + InsertOption string `json:"InsertOption,omitempty"` + InsertSource InsertSource `json:"InsertSource,omitempty"` + Target TableReference `json:"Target,omitempty"` + Columns []*ColumnReferenceExpression `json:"Columns,omitempty"` + TopRowFilter *TopRowFilter `json:"TopRowFilter,omitempty"` + OutputClause *OutputClause `json:"OutputClause,omitempty"` + OutputIntoClause *OutputIntoClause `json:"OutputIntoClause,omitempty"` +} + +// OutputClause represents an OUTPUT clause. +type OutputClause struct { + SelectColumns []SelectElement `json:"SelectColumns,omitempty"` +} + +// OutputIntoClause represents an OUTPUT INTO clause. +type OutputIntoClause struct { + SelectColumns []SelectElement `json:"SelectColumns,omitempty"` + IntoTable TableReference `json:"IntoTable,omitempty"` + IntoTableColumns []*ColumnReferenceExpression `json:"IntoTableColumns,omitempty"` } // InsertSource is an interface for INSERT sources. 
diff --git a/ast/named_table_reference.go b/ast/named_table_reference.go index 97d92565..50d1472f 100644 --- a/ast/named_table_reference.go +++ b/ast/named_table_reference.go @@ -4,7 +4,7 @@ package ast type NamedTableReference struct { SchemaObject *SchemaObjectName `json:"SchemaObject,omitempty"` Alias *Identifier `json:"Alias,omitempty"` - TableHints []*TableHint `json:"TableHints,omitempty"` + TableHints []TableHintType `json:"TableHints,omitempty"` ForPath bool `json:"ForPath,omitempty"` } diff --git a/ast/optimizer_hint.go b/ast/optimizer_hint.go index 061d80b3..ac5e5768 100644 --- a/ast/optimizer_hint.go +++ b/ast/optimizer_hint.go @@ -7,3 +7,13 @@ type OptimizerHint struct { func (*OptimizerHint) node() {} func (*OptimizerHint) optimizerHint() {} + +// TableHintsOptimizerHint represents a TABLE HINT optimizer hint. +type TableHintsOptimizerHint struct { + HintKind string `json:"HintKind,omitempty"` + ObjectName *SchemaObjectName `json:"ObjectName,omitempty"` + TableHints []TableHintType `json:"TableHints,omitempty"` +} + +func (*TableHintsOptimizerHint) node() {} +func (*TableHintsOptimizerHint) optimizerHint() {} diff --git a/ast/restore_statement.go b/ast/restore_statement.go index e73a1ac0..b581fdbf 100644 --- a/ast/restore_statement.go +++ b/ast/restore_statement.go @@ -15,7 +15,7 @@ func (s *RestoreStatement) node() {} // DeviceInfo represents a backup device type DeviceInfo struct { LogicalDevice *IdentifierOrValueExpression - PhysicalDevice *IdentifierOrValueExpression + PhysicalDevice ScalarExpression DeviceType string // "None", "Disk", "Tape", "Pipe", "VirtualDevice", "Database", "URL" PhysicalDeviceType string } diff --git a/ast/select_statement.go b/ast/select_statement.go index 9236901a..f9b64644 100644 --- a/ast/select_statement.go +++ b/ast/select_statement.go @@ -4,6 +4,7 @@ package ast type SelectStatement struct { QueryExpression QueryExpression `json:"QueryExpression,omitempty"` Into *SchemaObjectName `json:"Into,omitempty"` + On 
*Identifier `json:"On,omitempty"` OptimizerHints []OptimizerHintBase `json:"OptimizerHints,omitempty"` } diff --git a/ast/server_audit_statement.go b/ast/server_audit_statement.go new file mode 100644 index 00000000..cf28fa2b --- /dev/null +++ b/ast/server_audit_statement.go @@ -0,0 +1,95 @@ +package ast + +// CreateServerAuditStatement represents a CREATE SERVER AUDIT statement +type CreateServerAuditStatement struct { + AuditName *Identifier + AuditTarget *AuditTarget + Options []AuditOption + PredicateExpression BooleanExpression +} + +func (s *CreateServerAuditStatement) statement() {} +func (s *CreateServerAuditStatement) node() {} + +// AlterServerAuditStatement represents an ALTER SERVER AUDIT statement +type AlterServerAuditStatement struct { + AuditName *Identifier + AuditTarget *AuditTarget + Options []AuditOption + PredicateExpression BooleanExpression + RemoveWhere bool +} + +func (s *AlterServerAuditStatement) statement() {} +func (s *AlterServerAuditStatement) node() {} + +// AuditTarget represents the target of a server audit +type AuditTarget struct { + TargetKind string // File, ApplicationLog, SecurityLog + TargetOptions []AuditTargetOption +} + +// AuditTargetOption is an interface for audit target options +type AuditTargetOption interface { + auditTargetOption() +} + +// LiteralAuditTargetOption represents an audit target option with a literal value +type LiteralAuditTargetOption struct { + OptionKind string + Value ScalarExpression +} + +func (o *LiteralAuditTargetOption) auditTargetOption() {} + +// AuditOption is an interface for audit options +type AuditOption interface { + auditOption() +} + +// OnFailureAuditOption represents the ON_FAILURE option +type OnFailureAuditOption struct { + OptionKind string + OnFailureAction string // Continue, Shutdown, FailOperation +} + +func (o *OnFailureAuditOption) auditOption() {} + +// QueueDelayAuditOption represents the QUEUE_DELAY option +type QueueDelayAuditOption struct { + OptionKind string + 
Delay ScalarExpression +} + +func (o *QueueDelayAuditOption) auditOption() {} + +// StateAuditOption represents the STATE option +type StateAuditOption struct { + OptionKind string + Value string // On, Off +} + +func (o *StateAuditOption) auditOption() {} + +// AuditGuidAuditOption represents the AUDIT_GUID option +type AuditGuidAuditOption struct { + OptionKind string + Guid ScalarExpression +} + +func (o *AuditGuidAuditOption) auditOption() {} + +// SourceDeclaration represents a source declaration in an event predicate +type SourceDeclaration struct { + Value *EventSessionObjectName +} + +func (s *SourceDeclaration) node() {} +func (s *SourceDeclaration) scalarExpression() {} + +// EventSessionObjectName represents an event session object name +type EventSessionObjectName struct { + MultiPartIdentifier *MultiPartIdentifier +} + +func (e *EventSessionObjectName) node() {} diff --git a/ast/table_hint.go b/ast/table_hint.go index 82d6fd54..07d37b13 100644 --- a/ast/table_hint.go +++ b/ast/table_hint.go @@ -1,6 +1,21 @@ package ast +// TableHintType is an interface for all table hint types. +type TableHintType interface { + tableHint() +} + // TableHint represents a table hint. type TableHint struct { HintKind string `json:"HintKind,omitempty"` } + +func (*TableHint) tableHint() {} + +// IndexTableHint represents an INDEX table hint with index values. 
+type IndexTableHint struct { + HintKind string `json:"HintKind,omitempty"` + IndexValues []*IdentifierOrValueExpression `json:"IndexValues,omitempty"` +} + +func (*IndexTableHint) tableHint() {} diff --git a/parser/lexer.go b/parser/lexer.go index c346c6cf..9686f54d 100644 --- a/parser/lexer.go +++ b/parser/lexer.go @@ -466,12 +466,24 @@ func (l *Lexer) NextToken() Token { } case '\'': tok = l.readString() + case '"': + tok = l.readDoubleQuotedIdentifier() default: // Handle $ only if followed by a letter (for pseudo-columns like $ROWGUID) if l.ch == '$' && isLetter(l.peekChar()) { tok = l.readIdentifier() } else if isLetter(l.ch) || l.ch == '_' || l.ch == '@' || l.ch == '#' { tok = l.readIdentifier() + } else if l.ch >= 0x80 { + // Check for Unicode letter at start of identifier + r, _ := l.peekRune() + if unicode.IsLetter(r) { + tok = l.readIdentifier() + } else { + tok.Type = TokenError + tok.Literal = string(l.ch) + l.readChar() + } } else if isDigit(l.ch) { tok = l.readNumber() } else { @@ -591,10 +603,51 @@ func (l *Lexer) skipWhitespaceAndComments() { } } +// isIdentifierChar checks if the current position is a valid identifier character. +// Handles both ASCII and Unicode letters. +func (l *Lexer) isIdentifierChar(first bool) bool { + if l.ch == 0 { + return false + } + // ASCII fast path + if l.ch < 0x80 { + if isLetter(l.ch) || l.ch == '_' || l.ch == '@' || l.ch == '#' { + return true + } + if !first && (isDigit(l.ch) || l.ch == '$') { + return true + } + // $ is valid at start only when followed by a letter (pseudo-columns like $ROWGUID) + // But in an identifier context, $ is valid inside the identifier + if l.ch == '$' { + return true + } + return false + } + // UTF-8: decode rune and check if it's a letter + r, _ := l.peekRune() + return unicode.IsLetter(r) +} + +// advanceIdentifierChar advances past the current identifier character (which may be multi-byte). 
+func (l *Lexer) advanceIdentifierChar() { + if l.ch < 0x80 { + l.readChar() + return + } + // Multi-byte UTF-8: advance by rune size + _, size := l.peekRune() + for i := 0; i < size; i++ { + l.readChar() + } +} + func (l *Lexer) readIdentifier() Token { startPos := l.pos - for isLetter(l.ch) || isDigit(l.ch) || l.ch == '_' || l.ch == '@' || l.ch == '#' || l.ch == '$' { - l.readChar() + first := true + for l.isIdentifierChar(first) { + l.advanceIdentifierChar() + first = false } literal := l.input[startPos:l.pos] @@ -613,11 +666,43 @@ func (l *Lexer) readIdentifier() Token { func (l *Lexer) readBracketedIdentifier() Token { startPos := l.pos l.readChar() // skip opening [ - for l.ch != 0 && l.ch != ']' { + for l.ch != 0 { + if l.ch == ']' { + if l.peekChar() == ']' { + // Escaped bracket ]], consume both and continue + l.readChar() + l.readChar() + continue + } + // Closing bracket + l.readChar() // skip closing ] + break + } l.readChar() } - if l.ch == ']' { - l.readChar() // skip closing ] + return Token{ + Type: TokenIdent, + Literal: l.input[startPos:l.pos], + Pos: startPos, + } +} + +func (l *Lexer) readDoubleQuotedIdentifier() Token { + startPos := l.pos + l.readChar() // skip opening " + for l.ch != 0 { + if l.ch == '"' { + if l.peekChar() == '"' { + // Escaped quote "", consume both and continue + l.readChar() + l.readChar() + continue + } + // Closing quote + l.readChar() // skip closing " + break + } + l.readChar() } return Token{ Type: TokenIdent, diff --git a/parser/marshal.go b/parser/marshal.go index 287c4b7e..a8f651f6 100644 --- a/parser/marshal.go +++ b/parser/marshal.go @@ -70,6 +70,8 @@ func statementToJSON(stmt ast.Statement) jsonNode { return whileStatementToJSON(s) case *ast.BeginEndBlockStatement: return beginEndBlockStatementToJSON(s) + case *ast.BeginEndAtomicBlockStatement: + return beginEndAtomicBlockStatementToJSON(s) case *ast.CreateViewStatement: return createViewStatementToJSON(s) case *ast.CreateSchemaStatement: @@ -210,6 +212,8 @@ 
func statementToJSON(stmt ast.Statement) jsonNode { return dropRoleStatementToJSON(s) case *ast.DropAssemblyStatement: return dropAssemblyStatementToJSON(s) + case *ast.DropAsymmetricKeyStatement: + return dropAsymmetricKeyStatementToJSON(s) case *ast.CreateTableStatement: return createTableStatementToJSON(s) case *ast.GrantStatement: @@ -290,6 +294,10 @@ func statementToJSON(stmt ast.Statement) jsonNode { return createServerRoleStatementToJSON(s) case *ast.AlterServerRoleStatement: return alterServerRoleStatementToJSON(s) + case *ast.CreateServerAuditStatement: + return createServerAuditStatementToJSON(s) + case *ast.AlterServerAuditStatement: + return alterServerAuditStatementToJSON(s) case *ast.AlterRemoteServiceBindingStatement: return alterRemoteServiceBindingStatementToJSON(s) case *ast.AlterXmlSchemaCollectionStatement: @@ -330,8 +338,16 @@ func statementToJSON(stmt ast.Statement) jsonNode { return restoreStatementToJSON(s) case *ast.BackupDatabaseStatement: return backupDatabaseStatementToJSON(s) + case *ast.BackupTransactionLogStatement: + return backupTransactionLogStatementToJSON(s) case *ast.BackupCertificateStatement: return backupCertificateStatementToJSON(s) + case *ast.BackupServiceMasterKeyStatement: + return backupServiceMasterKeyStatementToJSON(s) + case *ast.RestoreServiceMasterKeyStatement: + return restoreServiceMasterKeyStatementToJSON(s) + case *ast.RestoreMasterKeyStatement: + return restoreMasterKeyStatementToJSON(s) case *ast.CreateUserStatement: return createUserStatementToJSON(s) case *ast.CreateAggregateStatement: @@ -386,6 +402,12 @@ func statementToJSON(stmt ast.Statement) jsonNode { return createStatisticsStatementToJSON(s) case *ast.CreateTypeStatement: return createTypeStatementToJSON(s) + case *ast.CreateTypeUddtStatement: + return createTypeUddtStatementToJSON(s) + case *ast.CreateTypeUdtStatement: + return createTypeUdtStatementToJSON(s) + case *ast.CreateTypeTableStatement: + return createTypeTableStatementToJSON(s) case 
*ast.CreateXmlIndexStatement: return createXmlIndexStatementToJSON(s) case *ast.CreatePartitionFunctionStatement: @@ -452,6 +474,8 @@ func statementToJSON(stmt ast.Statement) jsonNode { return alterPartitionFunctionStatementToJSON(s) case *ast.AlterFulltextCatalogStatement: return alterFulltextCatalogStatementToJSON(s) + case *ast.CreateFullTextCatalogStatement: + return createFullTextCatalogStatementToJSON(s) case *ast.AlterFulltextIndexStatement: return alterFulltextIndexStatementToJSON(s) case *ast.AlterSymmetricKeyStatement: @@ -878,6 +902,31 @@ func databaseOptionToJSON(opt ast.DatabaseOption) jsonNode { "Value": o.Value, "OptionKind": o.OptionKind, } + case *ast.MaxSizeDatabaseOption: + node := jsonNode{ + "$type": "MaxSizeDatabaseOption", + } + if o.MaxSize != nil { + node["MaxSize"] = scalarExpressionToJSON(o.MaxSize) + } + if o.Units != "" { + node["Units"] = o.Units + } + if o.OptionKind != "" { + node["OptionKind"] = o.OptionKind + } + return node + case *ast.LiteralDatabaseOption: + node := jsonNode{ + "$type": "LiteralDatabaseOption", + } + if o.Value != nil { + node["Value"] = scalarExpressionToJSON(o.Value) + } + if o.OptionKind != "" { + node["OptionKind"] = o.OptionKind + } + return node default: return jsonNode{"$type": "UnknownDatabaseOption"} } @@ -979,6 +1028,9 @@ func selectStatementToJSON(s *ast.SelectStatement) jsonNode { if s.Into != nil { node["Into"] = schemaObjectNameToJSON(s.Into) } + if s.On != nil { + node["On"] = identifierToJSON(s.On) + } if len(s.OptimizerHints) > 0 { hints := make([]jsonNode, len(s.OptimizerHints)) for i, h := range s.OptimizerHints { @@ -1026,6 +1078,24 @@ func optimizerHintToJSON(h ast.OptimizerHintBase) jsonNode { node["HintKind"] = hint.HintKind } return node + case *ast.TableHintsOptimizerHint: + node := jsonNode{ + "$type": "TableHintsOptimizerHint", + } + if hint.ObjectName != nil { + node["ObjectName"] = schemaObjectNameToJSON(hint.ObjectName) + } + if len(hint.TableHints) > 0 { + hints := make([]jsonNode, 
len(hint.TableHints)) + for i, h := range hint.TableHints { + hints[i] = tableHintToJSON(h) + } + node["TableHints"] = hints + } + if hint.HintKind != "" { + node["HintKind"] = hint.HintKind + } + return node default: return jsonNode{"$type": "UnknownOptimizerHint"} } @@ -1271,6 +1341,68 @@ func scalarExpressionToJSON(expr ast.ScalarExpression) jsonNode { node["Collation"] = identifierToJSON(e.Collation) } return node + case *ast.CastCall: + node := jsonNode{ + "$type": "CastCall", + } + if e.DataType != nil { + node["DataType"] = dataTypeReferenceToJSON(e.DataType) + } + if e.Parameter != nil { + node["Parameter"] = scalarExpressionToJSON(e.Parameter) + } + if e.Collation != nil { + node["Collation"] = identifierToJSON(e.Collation) + } + return node + case *ast.ConvertCall: + node := jsonNode{ + "$type": "ConvertCall", + } + if e.DataType != nil { + node["DataType"] = dataTypeReferenceToJSON(e.DataType) + } + if e.Parameter != nil { + node["Parameter"] = scalarExpressionToJSON(e.Parameter) + } + if e.Style != nil { + node["Style"] = scalarExpressionToJSON(e.Style) + } + if e.Collation != nil { + node["Collation"] = identifierToJSON(e.Collation) + } + return node + case *ast.TryCastCall: + node := jsonNode{ + "$type": "TryCastCall", + } + if e.DataType != nil { + node["DataType"] = dataTypeReferenceToJSON(e.DataType) + } + if e.Parameter != nil { + node["Parameter"] = scalarExpressionToJSON(e.Parameter) + } + if e.Collation != nil { + node["Collation"] = identifierToJSON(e.Collation) + } + return node + case *ast.TryConvertCall: + node := jsonNode{ + "$type": "TryConvertCall", + } + if e.DataType != nil { + node["DataType"] = dataTypeReferenceToJSON(e.DataType) + } + if e.Parameter != nil { + node["Parameter"] = scalarExpressionToJSON(e.Parameter) + } + if e.Style != nil { + node["Style"] = scalarExpressionToJSON(e.Style) + } + if e.Collation != nil { + node["Collation"] = identifierToJSON(e.Collation) + } + return node case *ast.BinaryExpression: node := jsonNode{ 
"$type": "BinaryExpression", @@ -1438,6 +1570,14 @@ func scalarExpressionToJSON(expr ast.ScalarExpression) jsonNode { node["ElseExpression"] = scalarExpressionToJSON(e.ElseExpression) } return node + case *ast.SourceDeclaration: + node := jsonNode{ + "$type": "SourceDeclaration", + } + if e.Value != nil { + node["Value"] = eventSessionObjectNameToJSON(e.Value) + } + return node default: return jsonNode{"$type": "UnknownScalarExpression"} } @@ -1472,6 +1612,16 @@ func multiPartIdentifierToJSON(mpi *ast.MultiPartIdentifier) jsonNode { return node } +func eventSessionObjectNameToJSON(e *ast.EventSessionObjectName) jsonNode { + node := jsonNode{ + "$type": "EventSessionObjectName", + } + if e.MultiPartIdentifier != nil { + node["MultiPartIdentifier"] = multiPartIdentifierToJSON(e.MultiPartIdentifier) + } + return node +} + func identifierOrValueExpressionToJSON(iove *ast.IdentifierOrValueExpression) jsonNode { node := jsonNode{ "$type": "IdentifierOrValueExpression", @@ -1856,14 +2006,34 @@ func expressionWithSortOrderToJSON(ewso *ast.ExpressionWithSortOrder) jsonNode { // ======================= New Statement JSON Functions ======================= -func tableHintToJSON(h *ast.TableHint) jsonNode { - node := jsonNode{ - "$type": "TableHint", - } - if h.HintKind != "" { - node["HintKind"] = h.HintKind +func tableHintToJSON(h ast.TableHintType) jsonNode { + switch th := h.(type) { + case *ast.TableHint: + node := jsonNode{ + "$type": "TableHint", + } + if th.HintKind != "" { + node["HintKind"] = th.HintKind + } + return node + case *ast.IndexTableHint: + node := jsonNode{ + "$type": "IndexTableHint", + } + if len(th.IndexValues) > 0 { + values := make([]jsonNode, len(th.IndexValues)) + for i, v := range th.IndexValues { + values[i] = identifierOrValueExpressionToJSON(v) + } + node["IndexValues"] = values + } + if th.HintKind != "" { + node["HintKind"] = th.HintKind + } + return node + default: + return jsonNode{"$type": "TableHint"} } - return node } func 
insertStatementToJSON(s *ast.InsertStatement) jsonNode { @@ -1885,9 +2055,10 @@ func insertStatementToJSON(s *ast.InsertStatement) jsonNode { func insertSpecificationToJSON(spec *ast.InsertSpecification) jsonNode { node := jsonNode{ - "$type": "InsertSpecification", + "$type": "InsertSpecification", + "InsertOption": "None", } - if spec.InsertOption != "" && spec.InsertOption != "None" { + if spec.InsertOption != "" { node["InsertOption"] = spec.InsertOption } if spec.InsertSource != nil { @@ -1896,6 +2067,15 @@ func insertSpecificationToJSON(spec *ast.InsertSpecification) jsonNode { if spec.Target != nil { node["Target"] = tableReferenceToJSON(spec.Target) } + if spec.TopRowFilter != nil { + node["TopRowFilter"] = topRowFilterToJSON(spec.TopRowFilter) + } + if spec.OutputClause != nil { + node["OutputClause"] = outputClauseToJSON(spec.OutputClause) + } + if spec.OutputIntoClause != nil { + node["OutputIntoClause"] = outputIntoClauseToJSON(spec.OutputIntoClause) + } if len(spec.Columns) > 0 { cols := make([]jsonNode, len(spec.Columns)) for i, c := range spec.Columns { @@ -1906,6 +2086,44 @@ func insertSpecificationToJSON(spec *ast.InsertSpecification) jsonNode { return node } +func outputClauseToJSON(oc *ast.OutputClause) jsonNode { + node := jsonNode{ + "$type": "OutputClause", + } + if len(oc.SelectColumns) > 0 { + cols := make([]jsonNode, len(oc.SelectColumns)) + for i, c := range oc.SelectColumns { + cols[i] = selectElementToJSON(c) + } + node["SelectColumns"] = cols + } + return node +} + +func outputIntoClauseToJSON(oic *ast.OutputIntoClause) jsonNode { + node := jsonNode{ + "$type": "OutputIntoClause", + } + if len(oic.SelectColumns) > 0 { + cols := make([]jsonNode, len(oic.SelectColumns)) + for i, c := range oic.SelectColumns { + cols[i] = selectElementToJSON(c) + } + node["SelectColumns"] = cols + } + if oic.IntoTable != nil { + node["IntoTable"] = tableReferenceToJSON(oic.IntoTable) + } + if len(oic.IntoTableColumns) > 0 { + cols := make([]jsonNode, 
len(oic.IntoTableColumns)) + for i, c := range oic.IntoTableColumns { + cols[i] = columnReferenceExpressionToJSON(c) + } + node["IntoTableColumns"] = cols + } + return node +} + func insertSourceToJSON(src ast.InsertSource) jsonNode { switch s := src.(type) { case *ast.ValuesInsertSource: @@ -2345,6 +2563,48 @@ func beginEndBlockStatementToJSON(s *ast.BeginEndBlockStatement) jsonNode { return node } +func beginEndAtomicBlockStatementToJSON(s *ast.BeginEndAtomicBlockStatement) jsonNode { + node := jsonNode{ + "$type": "BeginEndAtomicBlockStatement", + } + if len(s.Options) > 0 { + options := make([]jsonNode, len(s.Options)) + for i, o := range s.Options { + options[i] = atomicBlockOptionToJSON(o) + } + node["Options"] = options + } + if s.StatementList != nil { + node["StatementList"] = statementListToJSON(s.StatementList) + } + return node +} + +func atomicBlockOptionToJSON(o ast.AtomicBlockOption) jsonNode { + switch opt := o.(type) { + case *ast.IdentifierAtomicBlockOption: + node := jsonNode{ + "$type": "IdentifierAtomicBlockOption", + "OptionKind": opt.OptionKind, + } + if opt.Value != nil { + node["Value"] = identifierToJSON(opt.Value) + } + return node + case *ast.LiteralAtomicBlockOption: + node := jsonNode{ + "$type": "LiteralAtomicBlockOption", + "OptionKind": opt.OptionKind, + } + if opt.Value != nil { + node["Value"] = scalarExpressionToJSON(opt.Value) + } + return node + default: + return jsonNode{"$type": "UnknownAtomicBlockOption"} + } +} + func statementListToJSON(sl *ast.StatementList) jsonNode { node := jsonNode{ "$type": "StatementList", @@ -2561,6 +2821,23 @@ func (p *Parser) parseColumnDefinition() (*ast.ColumnDefinition, error) { // Parse column name (parseIdentifier already calls nextToken) col.ColumnIdentifier = p.parseIdentifier() + // Check for computed column (AS expression) + if strings.ToUpper(p.curTok.Literal) == "AS" { + p.nextToken() // consume AS + // Parse computed column expression + expr, err := p.parseScalarExpression() + if err 
!= nil { + return nil, err + } + col.ComputedColumnExpression = expr + // Check for PERSISTED + if strings.ToUpper(p.curTok.Literal) == "PERSISTED" { + col.IsPersisted = true + p.nextToken() // consume PERSISTED + } + return col, nil + } + // Parse data type - be lenient if no data type is provided dataType, err := p.parseDataTypeReference() if err != nil { @@ -3735,6 +4012,9 @@ func columnDefinitionToJSON(c *ast.ColumnDefinition) jsonNode { "IsMasked": c.IsMasked, "ColumnIdentifier": identifierToJSON(c.ColumnIdentifier), } + if c.ComputedColumnExpression != nil { + node["ComputedColumnExpression"] = scalarExpressionToJSON(c.ComputedColumnExpression) + } if c.IdentityOptions != nil { node["IdentityOptions"] = identityOptionsToJSON(c.IdentityOptions) } @@ -3801,9 +4081,12 @@ func constraintDefinitionToJSON(c ast.ConstraintDefinition) jsonNode { func uniqueConstraintToJSON(c *ast.UniqueConstraintDefinition) jsonNode { node := jsonNode{ "$type": "UniqueConstraintDefinition", - "Clustered": c.Clustered, "IsPrimaryKey": c.IsPrimaryKey, } + // Output Clustered if it's true, or if IndexType is set (meaning NONCLUSTERED was explicitly specified) + if c.Clustered || c.IndexType != nil { + node["Clustered"] = c.Clustered + } if c.ConstraintIdentifier != nil { node["ConstraintIdentifier"] = identifierToJSON(c.ConstraintIdentifier) } @@ -4599,6 +4882,121 @@ func alterServerRoleStatementToJSON(s *ast.AlterServerRoleStatement) jsonNode { return node } +func createServerAuditStatementToJSON(s *ast.CreateServerAuditStatement) jsonNode { + node := jsonNode{ + "$type": "CreateServerAuditStatement", + } + if s.AuditName != nil { + node["AuditName"] = identifierToJSON(s.AuditName) + } + if s.AuditTarget != nil { + node["AuditTarget"] = auditTargetToJSON(s.AuditTarget) + } + if len(s.Options) > 0 { + options := make([]jsonNode, len(s.Options)) + for i, o := range s.Options { + options[i] = auditOptionToJSON(o) + } + node["Options"] = options + } + if s.PredicateExpression != nil { + 
node["PredicateExpression"] = booleanExpressionToJSON(s.PredicateExpression) + } + return node +} + +func alterServerAuditStatementToJSON(s *ast.AlterServerAuditStatement) jsonNode { + node := jsonNode{ + "$type": "AlterServerAuditStatement", + "RemoveWhere": s.RemoveWhere, + } + if s.AuditName != nil { + node["AuditName"] = identifierToJSON(s.AuditName) + } + if s.AuditTarget != nil { + node["AuditTarget"] = auditTargetToJSON(s.AuditTarget) + } + if len(s.Options) > 0 { + options := make([]jsonNode, len(s.Options)) + for i, o := range s.Options { + options[i] = auditOptionToJSON(o) + } + node["Options"] = options + } + if s.PredicateExpression != nil { + node["PredicateExpression"] = booleanExpressionToJSON(s.PredicateExpression) + } + return node +} + +func auditTargetToJSON(t *ast.AuditTarget) jsonNode { + node := jsonNode{ + "$type": "AuditTarget", + "TargetKind": t.TargetKind, + } + if len(t.TargetOptions) > 0 { + opts := make([]jsonNode, len(t.TargetOptions)) + for i, o := range t.TargetOptions { + opts[i] = auditTargetOptionToJSON(o) + } + node["TargetOptions"] = opts + } + return node +} + +func auditTargetOptionToJSON(o ast.AuditTargetOption) jsonNode { + switch opt := o.(type) { + case *ast.LiteralAuditTargetOption: + node := jsonNode{ + "$type": "LiteralAuditTargetOption", + "OptionKind": opt.OptionKind, + } + if opt.Value != nil { + node["Value"] = scalarExpressionToJSON(opt.Value) + } + return node + default: + return jsonNode{"$type": "UnknownAuditTargetOption"} + } +} + +func auditOptionToJSON(o ast.AuditOption) jsonNode { + switch opt := o.(type) { + case *ast.OnFailureAuditOption: + return jsonNode{ + "$type": "OnFailureAuditOption", + "OnFailureAction": opt.OnFailureAction, + "OptionKind": opt.OptionKind, + } + case *ast.QueueDelayAuditOption: + node := jsonNode{ + "$type": "QueueDelayAuditOption", + "OptionKind": opt.OptionKind, + } + if opt.Delay != nil { + node["Delay"] = scalarExpressionToJSON(opt.Delay) + } + return node + case 
*ast.StateAuditOption: + return jsonNode{ + "$type": "StateAuditOption", + "Value": opt.Value, + "OptionKind": opt.OptionKind, + } + case *ast.AuditGuidAuditOption: + node := jsonNode{ + "$type": "AuditGuidAuditOption", + "OptionKind": opt.OptionKind, + } + if opt.Guid != nil { + node["Guid"] = scalarExpressionToJSON(opt.Guid) + } + return node + default: + return jsonNode{"$type": "UnknownAuditOption"} + } +} + func alterRemoteServiceBindingStatementToJSON(s *ast.AlterRemoteServiceBindingStatement) jsonNode { node := jsonNode{ "$type": "AlterRemoteServiceBindingStatement", @@ -4871,11 +5269,21 @@ func nullableConstraintToJSON(n *ast.NullableConstraintDefinition) jsonNode { } } -// parseRestoreStatement parses a RESTORE DATABASE statement -func (p *Parser) parseRestoreStatement() (*ast.RestoreStatement, error) { +// parseRestoreStatement parses a RESTORE statement +func (p *Parser) parseRestoreStatement() (ast.Statement, error) { // Consume RESTORE p.nextToken() + // Check for SERVICE MASTER KEY + if strings.ToUpper(p.curTok.Literal) == "SERVICE" { + return p.parseRestoreServiceMasterKeyStatement() + } + + // Check for MASTER KEY + if strings.ToUpper(p.curTok.Literal) == "MASTER" { + return p.parseRestoreMasterKeyStatement() + } + stmt := &ast.RestoreStatement{} // Parse restore kind (DATABASE, LOG, etc.) 
@@ -4884,7 +5292,7 @@ func (p *Parser) parseRestoreStatement() (*ast.RestoreStatement, error) { stmt.Kind = "Database" p.nextToken() case "LOG": - stmt.Kind = "Log" + stmt.Kind = "TransactionLog" p.nextToken() default: stmt.Kind = "Database" @@ -4905,9 +5313,14 @@ func (p *Parser) parseRestoreStatement() (*ast.RestoreStatement, error) { } stmt.DatabaseName = dbName - // Expect FROM + // Check for optional FROM clause if strings.ToUpper(p.curTok.Literal) != "FROM" { - return nil, fmt.Errorf("expected FROM, got %s", p.curTok.Literal) + // No FROM clause - just the database name with no devices + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + return stmt, nil } p.nextToken() @@ -5050,40 +5463,205 @@ func (p *Parser) parseRestoreStatement() (*ast.RestoreStatement, error) { return stmt, nil } -// parseCreateUserStatement parses a CREATE USER statement -func (p *Parser) parseCreateUserStatement() (*ast.CreateUserStatement, error) { - // Consume USER +func (p *Parser) parseRestoreServiceMasterKeyStatement() (*ast.RestoreServiceMasterKeyStatement, error) { + // Consume SERVICE p.nextToken() - stmt := &ast.CreateUserStatement{} + // Expect MASTER + if strings.ToUpper(p.curTok.Literal) != "MASTER" { + return nil, fmt.Errorf("expected MASTER after SERVICE, got %s", p.curTok.Literal) + } + p.nextToken() - // Parse user name - stmt.Name = p.parseIdentifier() + // Expect KEY + if p.curTok.Type != TokenKey { + return nil, fmt.Errorf("expected KEY after MASTER, got %s", p.curTok.Literal) + } + p.nextToken() - // Check for login option - if strings.ToUpper(p.curTok.Literal) == "FOR" || strings.ToUpper(p.curTok.Literal) == "FROM" { - isFor := strings.ToUpper(p.curTok.Literal) == "FOR" - p.nextToken() + stmt := &ast.RestoreServiceMasterKeyStatement{} - loginOption := &ast.UserLoginOption{} + // Expect FROM + if strings.ToUpper(p.curTok.Literal) != "FROM" { + return nil, fmt.Errorf("expected FROM after SERVICE MASTER KEY, got %s", 
p.curTok.Literal) + } + p.nextToken() - switch strings.ToUpper(p.curTok.Literal) { - case "LOGIN": - if isFor { - loginOption.UserLoginOptionType = "ForLogin" - } else { - loginOption.UserLoginOptionType = "FromLogin" - } - p.nextToken() - loginOption.Identifier = p.parseIdentifier() - case "CERTIFICATE": - loginOption.UserLoginOptionType = "FromCertificate" - p.nextToken() - loginOption.Identifier = p.parseIdentifier() - case "ASYMMETRIC": - p.nextToken() // consume ASYMMETRIC - if strings.ToUpper(p.curTok.Literal) == "KEY" { - p.nextToken() // consume KEY + // Expect FILE + if strings.ToUpper(p.curTok.Literal) != "FILE" { + return nil, fmt.Errorf("expected FILE after FROM, got %s", p.curTok.Literal) + } + p.nextToken() + + // Expect = + if p.curTok.Type != TokenEquals { + return nil, fmt.Errorf("expected = after FILE, got %s", p.curTok.Literal) + } + p.nextToken() + + // Parse file path + file, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.File = file + + // Parse DECRYPTION BY PASSWORD clause + if strings.ToUpper(p.curTok.Literal) == "DECRYPTION" { + p.nextToken() // consume DECRYPTION + if strings.ToUpper(p.curTok.Literal) == "BY" { + p.nextToken() // consume BY + } + if strings.ToUpper(p.curTok.Literal) == "PASSWORD" { + p.nextToken() // consume PASSWORD + if p.curTok.Type == TokenEquals { + p.nextToken() + } + pwd, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.Password = pwd + } + } + + // Check for FORCE + if strings.ToUpper(p.curTok.Literal) == "FORCE" { + stmt.IsForce = true + p.nextToken() + } + + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + + return stmt, nil +} + +func (p *Parser) parseRestoreMasterKeyStatement() (*ast.RestoreMasterKeyStatement, error) { + // Consume MASTER + p.nextToken() + + // Expect KEY + if p.curTok.Type != TokenKey { + return nil, fmt.Errorf("expected KEY after MASTER, got %s", p.curTok.Literal) + } + p.nextToken() + + 
stmt := &ast.RestoreMasterKeyStatement{} + + // Expect FROM + if strings.ToUpper(p.curTok.Literal) != "FROM" { + return nil, fmt.Errorf("expected FROM after MASTER KEY, got %s", p.curTok.Literal) + } + p.nextToken() + + // Expect FILE + if strings.ToUpper(p.curTok.Literal) != "FILE" { + return nil, fmt.Errorf("expected FILE after FROM, got %s", p.curTok.Literal) + } + p.nextToken() + + // Expect = + if p.curTok.Type != TokenEquals { + return nil, fmt.Errorf("expected = after FILE, got %s", p.curTok.Literal) + } + p.nextToken() + + // Parse file path + file, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.File = file + + // Parse DECRYPTION BY PASSWORD clause + if strings.ToUpper(p.curTok.Literal) == "DECRYPTION" { + p.nextToken() // consume DECRYPTION + if strings.ToUpper(p.curTok.Literal) == "BY" { + p.nextToken() // consume BY + } + if strings.ToUpper(p.curTok.Literal) == "PASSWORD" { + p.nextToken() // consume PASSWORD + if p.curTok.Type == TokenEquals { + p.nextToken() + } + pwd, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.Password = pwd + } + } + + // Parse ENCRYPTION BY PASSWORD clause + if strings.ToUpper(p.curTok.Literal) == "ENCRYPTION" { + p.nextToken() // consume ENCRYPTION + if strings.ToUpper(p.curTok.Literal) == "BY" { + p.nextToken() // consume BY + } + if strings.ToUpper(p.curTok.Literal) == "PASSWORD" { + p.nextToken() // consume PASSWORD + if p.curTok.Type == TokenEquals { + p.nextToken() + } + pwd, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.EncryptionPassword = pwd + } + } + + // Check for FORCE + if strings.ToUpper(p.curTok.Literal) == "FORCE" { + stmt.IsForce = true + p.nextToken() + } + + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + + return stmt, nil +} + +// parseCreateUserStatement parses a CREATE USER statement +func (p *Parser) parseCreateUserStatement() (*ast.CreateUserStatement, error) { + 
// Consume USER + p.nextToken() + + stmt := &ast.CreateUserStatement{} + + // Parse user name + stmt.Name = p.parseIdentifier() + + // Check for login option + if strings.ToUpper(p.curTok.Literal) == "FOR" || strings.ToUpper(p.curTok.Literal) == "FROM" { + isFor := strings.ToUpper(p.curTok.Literal) == "FOR" + p.nextToken() + + loginOption := &ast.UserLoginOption{} + + switch strings.ToUpper(p.curTok.Literal) { + case "LOGIN": + if isFor { + loginOption.UserLoginOptionType = "ForLogin" + } else { + loginOption.UserLoginOptionType = "FromLogin" + } + p.nextToken() + loginOption.Identifier = p.parseIdentifier() + case "CERTIFICATE": + loginOption.UserLoginOptionType = "FromCertificate" + p.nextToken() + loginOption.Identifier = p.parseIdentifier() + case "ASYMMETRIC": + p.nextToken() // consume ASYMMETRIC + if strings.ToUpper(p.curTok.Literal) == "KEY" { + p.nextToken() // consume KEY } loginOption.UserLoginOptionType = "FromAsymmetricKey" loginOption.Identifier = p.parseIdentifier() @@ -5322,8 +5900,8 @@ func (p *Parser) parseCreateColumnStoreIndexStatement() (*ast.CreateColumnStoreI } } - // Parse optional ORDER clause - if strings.ToUpper(p.curTok.Literal) == "ORDER" { + // Parse optional ORDER clause (Azure Synapse/DW syntax - ORDER directly after ON table) + if p.curTok.Type == TokenOrder || strings.ToUpper(p.curTok.Literal) == "ORDER" { p.nextToken() // consume ORDER if p.curTok.Type == TokenLParen { p.nextToken() // consume ( @@ -5349,19 +5927,110 @@ func (p *Parser) parseCreateColumnStoreIndexStatement() (*ast.CreateColumnStoreI } } - // Skip optional WITH clause for now + // Parse optional WHERE clause (filtered index) + if p.curTok.Type == TokenWhere { + p.nextToken() // consume WHERE + pred, err := p.parseBooleanExpression() + if err != nil { + return nil, err + } + stmt.FilterClause = pred + } + + // Parse optional WITH clause if p.curTok.Type == TokenWith { - // TODO: parse WITH options - p.nextToken() + p.nextToken() // consume WITH if p.curTok.Type == 
TokenLParen { - p.nextToken() - depth := 1 - for depth > 0 && p.curTok.Type != TokenEOF { - if p.curTok.Type == TokenLParen { - depth++ - } else if p.curTok.Type == TokenRParen { - depth-- + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + if p.curTok.Type == TokenComma { + p.nextToken() + continue } + + optName := strings.ToUpper(p.curTok.Literal) + switch optName { + case "COMPRESSION_DELAY": + p.nextToken() // consume COMPRESSION_DELAY + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + expr, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + opt := &ast.CompressionDelayIndexOption{ + Expression: expr, + TimeUnit: "Unitless", + OptionKind: "CompressionDelay", + } + // Check for MINUTE/MINUTES + if strings.ToUpper(p.curTok.Literal) == "MINUTE" { + opt.TimeUnit = "Minute" + p.nextToken() + } else if strings.ToUpper(p.curTok.Literal) == "MINUTES" { + opt.TimeUnit = "Minutes" + p.nextToken() + } + stmt.IndexOptions = append(stmt.IndexOptions, opt) + + case "SORT_IN_TEMPDB": + p.nextToken() // consume SORT_IN_TEMPDB + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + state := "NotSet" + if p.curTok.Type == TokenOn { + state = "On" + p.nextToken() + } else if strings.ToUpper(p.curTok.Literal) == "OFF" { + state = "Off" + p.nextToken() + } + stmt.IndexOptions = append(stmt.IndexOptions, &ast.IndexStateOption{ + OptionKind: "SortInTempDB", + OptionState: state, + }) + + case "ORDER": + p.nextToken() // consume ORDER + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + orderOpt := &ast.OrderIndexOption{ + OptionKind: "Order", + } + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + colRef := &ast.ColumnReferenceExpression{ + ColumnType: "Regular", + MultiPartIdentifier: &ast.MultiPartIdentifier{ + Identifiers: []*ast.Identifier{p.parseIdentifier()}, + }, + } + colRef.MultiPartIdentifier.Count = 
len(colRef.MultiPartIdentifier.Identifiers) + orderOpt.Columns = append(orderOpt.Columns, colRef) + + if p.curTok.Type == TokenComma { + p.nextToken() + } else { + break + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() + } + stmt.IndexOptions = append(stmt.IndexOptions, orderOpt) + } + + default: + // Skip unknown options + p.nextToken() + if p.curTok.Type == TokenEquals { + p.nextToken() + p.nextToken() // skip value + } + } + } + if p.curTok.Type == TokenRParen { p.nextToken() } } @@ -5417,7 +6086,7 @@ func (p *Parser) parseAlterFunctionStatement() (*ast.AlterFunctionStatement, err } // Parse data type if present - if p.curTok.Type != TokenRParen && p.curTok.Type != TokenComma { + if p.curTok.Type != TokenRParen && p.curTok.Type != TokenComma && p.curTok.Type != TokenEquals { dataType, err := p.parseDataType() if err != nil { return nil, err @@ -5425,6 +6094,16 @@ func (p *Parser) parseAlterFunctionStatement() (*ast.AlterFunctionStatement, err param.DataType = dataType } + // Parse optional default value + if p.curTok.Type == TokenEquals { + p.nextToken() + val, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + param.Value = val + } + stmt.Parameters = append(stmt.Parameters, param) if p.curTok.Type == TokenComma { @@ -5445,26 +6124,77 @@ func (p *Parser) parseAlterFunctionStatement() (*ast.AlterFunctionStatement, err } p.nextToken() - // Parse return type - returnDataType, err := p.parseDataType() - if err != nil { - return nil, err - } - stmt.ReturnType = &ast.ScalarFunctionReturnType{ - DataType: returnDataType, - } - - // Parse AS - if p.curTok.Type == TokenAs { + // Check if RETURNS TABLE + if strings.ToUpper(p.curTok.Literal) == "TABLE" { p.nextToken() - } - // Parse statement list - stmtList, err := p.parseFunctionStatementList() - if err != nil { - return nil, err + // Parse optional WITH clause for options + if strings.ToUpper(p.curTok.Literal) == "WITH" { + p.nextToken() + for { + opt := &ast.FunctionOption{} + switch 
strings.ToUpper(p.curTok.Literal) { + case "SCHEMABINDING": + opt.OptionKind = "SchemaBinding" + case "ENCRYPTION": + opt.OptionKind = "Encryption" + case "NATIVE_COMPILATION": + opt.OptionKind = "NativeCompilation" + default: + opt.OptionKind = capitalizeFirst(p.curTok.Literal) + } + p.nextToken() + stmt.Options = append(stmt.Options, opt) + + if p.curTok.Type == TokenComma { + p.nextToken() + } else { + break + } + } + } + + // Parse AS + if p.curTok.Type == TokenAs { + p.nextToken() + } + + // For inline table-valued functions, parse RETURN SELECT... + if strings.ToUpper(p.curTok.Literal) == "RETURN" { + p.nextToken() + // Parse the SELECT statement + selectStmt, err := p.parseStatement() + if err != nil { + return nil, err + } + if sel, ok := selectStmt.(*ast.SelectStatement); ok { + stmt.ReturnType = &ast.SelectFunctionReturnType{ + SelectStatement: sel, + } + } + } + } else { + // Scalar function - parse return type + returnDataType, err := p.parseDataType() + if err != nil { + return nil, err + } + stmt.ReturnType = &ast.ScalarFunctionReturnType{ + DataType: returnDataType, + } + + // Parse AS + if p.curTok.Type == TokenAs { + p.nextToken() + } + + // Parse statement list + stmtList, err := p.parseFunctionStatementList() + if err != nil { + return nil, err + } + stmt.StatementList = stmtList } - stmt.StatementList = stmtList // Skip optional semicolon if p.curTok.Type == TokenSemicolon { @@ -5679,6 +6409,41 @@ func (p *Parser) parseAlterIndexStatement() (*ast.AlterIndexStatement, error) { case "SET": stmt.AlterIndexType = "Set" p.nextToken() + // Parse SET options (SET (...)) + if p.curTok.Type == TokenLParen { + p.nextToken() + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + optionName := strings.ToUpper(p.curTok.Literal) + p.nextToken() + + if p.curTok.Type == TokenEquals { + p.nextToken() + valueStr := strings.ToUpper(p.curTok.Literal) + p.nextToken() + + if valueStr == "ON" || valueStr == "OFF" { + opt := &ast.IndexStateOption{ + 
OptionKind: p.getIndexOptionKind(optionName), + OptionState: p.capitalizeFirst(strings.ToLower(valueStr)), + } + stmt.IndexOptions = append(stmt.IndexOptions, opt) + } else { + opt := &ast.IndexExpressionOption{ + OptionKind: p.getIndexOptionKind(optionName), + Expression: &ast.IntegerLiteral{Value: valueStr}, + } + stmt.IndexOptions = append(stmt.IndexOptions, opt) + } + } + + if p.curTok.Type == TokenComma { + p.nextToken() + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() + } + } case "RESUME": stmt.AlterIndexType = "Resume" p.nextToken() @@ -5766,20 +6531,21 @@ func (p *Parser) parseAlterIndexStatement() (*ast.AlterIndexStatement, error) { func (p *Parser) getIndexOptionKind(optionName string) string { optionMap := map[string]string{ - "PAD_INDEX": "PadIndex", - "FILLFACTOR": "FillFactor", - "SORT_IN_TEMPDB": "SortInTempDB", - "IGNORE_DUP_KEY": "IgnoreDupKey", - "STATISTICS_NORECOMPUTE": "StatisticsNoRecompute", - "DROP_EXISTING": "DropExisting", - "ONLINE": "Online", - "ALLOW_ROW_LOCKS": "AllowRowLocks", - "ALLOW_PAGE_LOCKS": "AllowPageLocks", - "MAXDOP": "MaxDop", - "DATA_COMPRESSION": "DataCompression", - "RESUMABLE": "Resumable", - "MAX_DURATION": "MaxDuration", - "WAIT_AT_LOW_PRIORITY": "WaitAtLowPriority", + "PAD_INDEX": "PadIndex", + "FILLFACTOR": "FillFactor", + "SORT_IN_TEMPDB": "SortInTempDB", + "IGNORE_DUP_KEY": "IgnoreDupKey", + "STATISTICS_NORECOMPUTE": "StatisticsNoRecompute", + "DROP_EXISTING": "DropExisting", + "ONLINE": "Online", + "ALLOW_ROW_LOCKS": "AllowRowLocks", + "ALLOW_PAGE_LOCKS": "AllowPageLocks", + "MAXDOP": "MaxDop", + "DATA_COMPRESSION": "DataCompression", + "RESUMABLE": "Resumable", + "MAX_DURATION": "MaxDuration", + "WAIT_AT_LOW_PRIORITY": "WaitAtLowPriority", + "OPTIMIZE_FOR_SEQUENTIAL_KEY": "OptimizeForSequentialKey", } if kind, ok := optionMap[optionName]; ok { return kind @@ -5828,13 +6594,19 @@ func (p *Parser) parseCreateFunctionStatement() (*ast.CreateFunctionStatement, e // Parse data type if present if 
p.curTok.Type != TokenRParen && p.curTok.Type != TokenComma { - dataType, err := p.parseDataType() + dataType, err := p.parseDataTypeReference() if err != nil { return nil, err } param.DataType = dataType } + // Check for READONLY modifier + if strings.ToUpper(p.curTok.Literal) == "READONLY" { + param.Modifier = "ReadOnly" + p.nextToken() + } + stmt.Parameters = append(stmt.Parameters, param) if p.curTok.Type == TokenComma { @@ -5857,7 +6629,7 @@ func (p *Parser) parseCreateFunctionStatement() (*ast.CreateFunctionStatement, e p.nextToken() // Parse return type - returnDataType, err := p.parseDataType() + returnDataType, err := p.parseDataTypeReference() if err != nil { p.skipToEndOfStatement() return stmt, nil @@ -5933,6 +6705,55 @@ func (p *Parser) parseCreateTriggerStatement() (*ast.CreateTriggerStatement, err } stmt.TriggerObject = triggerObject + // Parse optional WITH clause + if p.curTok.Type == TokenWith { + p.nextToken() // consume WITH + for { + optName := strings.ToUpper(p.curTok.Literal) + switch optName { + case "NATIVE_COMPILATION": + stmt.Options = append(stmt.Options, &ast.TriggerOption{OptionKind: "NativeCompile"}) + p.nextToken() + case "SCHEMABINDING": + stmt.Options = append(stmt.Options, &ast.TriggerOption{OptionKind: "SchemaBinding"}) + p.nextToken() + case "ENCRYPTION": + stmt.Options = append(stmt.Options, &ast.TriggerOption{OptionKind: "Encryption"}) + p.nextToken() + case "EXECUTE": + p.nextToken() // consume EXECUTE + if p.curTok.Type == TokenAs { + p.nextToken() // consume AS + } + execAsClause := &ast.ExecuteAsClause{} + switch strings.ToUpper(p.curTok.Literal) { + case "CALLER": + execAsClause.ExecuteAsOption = "Caller" + case "SELF": + execAsClause.ExecuteAsOption = "Self" + case "OWNER": + execAsClause.ExecuteAsOption = "Owner" + default: + // User name + execAsClause.ExecuteAsOption = "User" + } + p.nextToken() + stmt.Options = append(stmt.Options, &ast.ExecuteAsTriggerOption{ + OptionKind: "ExecuteAsClause", + ExecuteAsClause: 
execAsClause, + }) + default: + // Unknown option, skip it + p.nextToken() + } + if p.curTok.Type == TokenComma { + p.nextToken() + } else { + break + } + } + } + // Parse trigger type (FOR, AFTER, INSTEAD OF) switch strings.ToUpper(p.curTok.Literal) { case "FOR": @@ -6076,6 +6897,30 @@ func backupDatabaseStatementToJSON(s *ast.BackupDatabaseStatement) jsonNode { return node } +func backupTransactionLogStatementToJSON(s *ast.BackupTransactionLogStatement) jsonNode { + node := jsonNode{ + "$type": "BackupTransactionLogStatement", + } + if s.DatabaseName != nil { + node["DatabaseName"] = identifierOrValueExpressionToJSON(s.DatabaseName) + } + if len(s.Options) > 0 { + options := make([]jsonNode, len(s.Options)) + for i, o := range s.Options { + options[i] = backupOptionToJSON(o) + } + node["Options"] = options + } + if len(s.Devices) > 0 { + devices := make([]jsonNode, len(s.Devices)) + for i, d := range s.Devices { + devices[i] = deviceInfoToJSON(d) + } + node["Devices"] = devices + } + return node +} + func backupCertificateStatementToJSON(s *ast.BackupCertificateStatement) jsonNode { node := jsonNode{ "$type": "BackupCertificateStatement", @@ -6086,15 +6931,59 @@ func backupCertificateStatementToJSON(s *ast.BackupCertificateStatement) jsonNod if s.Name != nil { node["Name"] = identifierToJSON(s.Name) } - node["ActiveForBeginDialog"] = s.ActiveForBeginDialog - if s.PrivateKeyPath != nil { - node["PrivateKeyPath"] = scalarExpressionToJSON(s.PrivateKeyPath) + node["ActiveForBeginDialog"] = s.ActiveForBeginDialog + if s.PrivateKeyPath != nil { + node["PrivateKeyPath"] = scalarExpressionToJSON(s.PrivateKeyPath) + } + if s.EncryptionPassword != nil { + node["EncryptionPassword"] = scalarExpressionToJSON(s.EncryptionPassword) + } + if s.DecryptionPassword != nil { + node["DecryptionPassword"] = scalarExpressionToJSON(s.DecryptionPassword) + } + return node +} + +func backupServiceMasterKeyStatementToJSON(s *ast.BackupServiceMasterKeyStatement) jsonNode { + node := 
jsonNode{ + "$type": "BackupServiceMasterKeyStatement", + } + if s.File != nil { + node["File"] = scalarExpressionToJSON(s.File) + } + if s.Password != nil { + node["Password"] = scalarExpressionToJSON(s.Password) + } + return node +} + +func restoreServiceMasterKeyStatementToJSON(s *ast.RestoreServiceMasterKeyStatement) jsonNode { + node := jsonNode{ + "$type": "RestoreServiceMasterKeyStatement", + "IsForce": s.IsForce, + } + if s.File != nil { + node["File"] = scalarExpressionToJSON(s.File) + } + if s.Password != nil { + node["Password"] = scalarExpressionToJSON(s.Password) + } + return node +} + +func restoreMasterKeyStatementToJSON(s *ast.RestoreMasterKeyStatement) jsonNode { + node := jsonNode{ + "$type": "RestoreMasterKeyStatement", + "IsForce": s.IsForce, } if s.EncryptionPassword != nil { node["EncryptionPassword"] = scalarExpressionToJSON(s.EncryptionPassword) } - if s.DecryptionPassword != nil { - node["DecryptionPassword"] = scalarExpressionToJSON(s.DecryptionPassword) + if s.File != nil { + node["File"] = scalarExpressionToJSON(s.File) + } + if s.Password != nil { + node["Password"] = scalarExpressionToJSON(s.Password) } return node } @@ -6119,7 +7008,7 @@ func deviceInfoToJSON(d *ast.DeviceInfo) jsonNode { node["LogicalDevice"] = identifierOrValueExpressionToJSON(d.LogicalDevice) } if d.PhysicalDevice != nil { - node["PhysicalDevice"] = identifierOrValueExpressionToJSON(d.PhysicalDevice) + node["PhysicalDevice"] = scalarExpressionToJSON(d.PhysicalDevice) } return node } @@ -6272,6 +7161,16 @@ func createColumnStoreIndexStatementToJSON(s *ast.CreateColumnStoreIndexStatemen } node["Columns"] = cols } + if s.FilterClause != nil { + node["FilterPredicate"] = booleanExpressionToJSON(s.FilterClause) + } + if len(s.IndexOptions) > 0 { + opts := make([]jsonNode, len(s.IndexOptions)) + for i, opt := range s.IndexOptions { + opts[i] = columnStoreIndexOptionToJSON(opt) + } + node["IndexOptions"] = opts + } if len(s.OrderedColumns) > 0 { cols := make([]jsonNode, 
len(s.OrderedColumns)) for i, col := range s.OrderedColumns { @@ -6282,6 +7181,42 @@ func createColumnStoreIndexStatementToJSON(s *ast.CreateColumnStoreIndexStatemen return node } +func columnStoreIndexOptionToJSON(opt ast.IndexOption) jsonNode { + switch o := opt.(type) { + case *ast.CompressionDelayIndexOption: + node := jsonNode{ + "$type": "CompressionDelayIndexOption", + "OptionKind": o.OptionKind, + "TimeUnit": o.TimeUnit, + } + if o.Expression != nil { + node["Expression"] = scalarExpressionToJSON(o.Expression) + } + return node + case *ast.OrderIndexOption: + node := jsonNode{ + "$type": "OrderIndexOption", + "OptionKind": o.OptionKind, + } + if len(o.Columns) > 0 { + cols := make([]jsonNode, len(o.Columns)) + for i, col := range o.Columns { + cols[i] = columnReferenceExpressionToJSON(col) + } + node["Columns"] = cols + } + return node + case *ast.IndexStateOption: + return jsonNode{ + "$type": "IndexStateOption", + "OptionKind": o.OptionKind, + "OptionState": o.OptionState, + } + default: + return jsonNode{"$type": "UnknownIndexOption"} + } +} + func createSpatialIndexStatementToJSON(s *ast.CreateSpatialIndexStatement) jsonNode { node := jsonNode{ "$type": "CreateSpatialIndexStatement", @@ -6394,12 +7329,33 @@ func alterFunctionStatementToJSON(s *ast.AlterFunctionStatement) jsonNode { if s.ReturnType != nil { node["ReturnType"] = functionReturnTypeToJSON(s.ReturnType) } + if len(s.Options) > 0 { + opts := make([]jsonNode, len(s.Options)) + for i, o := range s.Options { + opts[i] = functionOptionToJSON(o) + } + node["Options"] = opts + } + if len(s.Parameters) > 0 { + params := make([]jsonNode, len(s.Parameters)) + for i, p := range s.Parameters { + params[i] = procedureParameterToJSON(p) + } + node["Parameters"] = params + } if s.StatementList != nil { node["StatementList"] = statementListToJSON(s.StatementList) } return node } +func functionOptionToJSON(o *ast.FunctionOption) jsonNode { + return jsonNode{ + "$type": "FunctionOption", + "OptionKind": 
o.OptionKind, + } +} + func createFunctionStatementToJSON(s *ast.CreateFunctionStatement) jsonNode { node := jsonNode{ "$type": "CreateFunctionStatement", @@ -6407,6 +7363,13 @@ func createFunctionStatementToJSON(s *ast.CreateFunctionStatement) jsonNode { if s.Name != nil { node["Name"] = schemaObjectNameToJSON(s.Name) } + if len(s.Parameters) > 0 { + params := make([]jsonNode, len(s.Parameters)) + for i, p := range s.Parameters { + params[i] = procedureParameterToJSON(p) + } + node["Parameters"] = params + } if s.ReturnType != nil { node["ReturnType"] = functionReturnTypeToJSON(s.ReturnType) } @@ -6426,6 +7389,14 @@ func functionReturnTypeToJSON(r ast.FunctionReturnType) jsonNode { node["DataType"] = dataTypeReferenceToJSON(rt.DataType) } return node + case *ast.SelectFunctionReturnType: + node := jsonNode{ + "$type": "SelectFunctionReturnType", + } + if rt.SelectStatement != nil { + node["SelectStatement"] = selectStatementToJSON(rt.SelectStatement) + } + return node default: return jsonNode{"$type": "UnknownFunctionReturnType"} } @@ -6470,6 +7441,13 @@ func createTriggerStatementToJSON(s *ast.CreateTriggerStatement) jsonNode { if s.TriggerObject != nil { node["TriggerObject"] = triggerObjectToJSON(s.TriggerObject) } + if len(s.Options) > 0 { + options := make([]jsonNode, len(s.Options)) + for i, o := range s.Options { + options[i] = triggerOptionTypeToJSON(o) + } + node["Options"] = options + } if len(s.TriggerActions) > 0 { actions := make([]jsonNode, len(s.TriggerActions)) for i, a := range s.TriggerActions { @@ -6483,6 +7461,34 @@ func createTriggerStatementToJSON(s *ast.CreateTriggerStatement) jsonNode { return node } +func triggerOptionTypeToJSON(o ast.TriggerOptionType) jsonNode { + switch opt := o.(type) { + case *ast.TriggerOption: + node := jsonNode{ + "$type": "TriggerOption", + "OptionKind": opt.OptionKind, + } + if opt.OptionState != "" { + node["OptionState"] = opt.OptionState + } + return node + case *ast.ExecuteAsTriggerOption: + node := jsonNode{ 
+ "$type": "ExecuteAsTriggerOption", + "OptionKind": opt.OptionKind, + } + if opt.ExecuteAsClause != nil { + node["ExecuteAsClause"] = jsonNode{ + "$type": "ExecuteAsClause", + "ExecuteAsOption": opt.ExecuteAsClause.ExecuteAsOption, + } + } + return node + default: + return jsonNode{"$type": "UnknownTriggerOption"} + } +} + func triggerObjectToJSON(t *ast.TriggerObject) jsonNode { node := jsonNode{ "$type": "TriggerObject", @@ -7281,6 +8287,18 @@ func dropAssemblyStatementToJSON(s *ast.DropAssemblyStatement) jsonNode { return node } +func dropAsymmetricKeyStatementToJSON(s *ast.DropAsymmetricKeyStatement) jsonNode { + node := jsonNode{ + "$type": "DropAsymmetricKeyStatement", + "RemoveProviderKey": s.RemoveProviderKey, + } + if s.Name != nil { + node["Name"] = identifierToJSON(s.Name) + } + node["IsIfExists"] = s.IsIfExists + return node +} + func alterTableTriggerModificationStatementToJSON(s *ast.AlterTableTriggerModificationStatement) jsonNode { node := jsonNode{ "$type": "AlterTableTriggerModificationStatement", @@ -7522,9 +8540,35 @@ func createExternalLibraryStatementToJSON(s *ast.CreateExternalLibraryStatement) node := jsonNode{ "$type": "CreateExternalLibraryStatement", } + if s.Owner != nil { + node["Owner"] = identifierToJSON(s.Owner) + } if s.Name != nil { node["Name"] = identifierToJSON(s.Name) } + if s.Language != nil { + node["Language"] = scalarExpressionToJSON(s.Language) + } + if len(s.ExternalLibraryFiles) > 0 { + files := make([]jsonNode, len(s.ExternalLibraryFiles)) + for i, f := range s.ExternalLibraryFiles { + files[i] = externalLibraryFileOptionToJSON(f) + } + node["ExternalLibraryFiles"] = files + } + return node +} + +func externalLibraryFileOptionToJSON(f *ast.ExternalLibraryFileOption) jsonNode { + node := jsonNode{ + "$type": "ExternalLibraryFileOption", + } + if f.Content != nil { + node["Content"] = scalarExpressionToJSON(f.Content) + } + if f.Platform != nil { + node["Platform"] = identifierToJSON(f.Platform) + } return node } @@ 
-7745,6 +8789,18 @@ func alterAsymmetricKeyStatementToJSON(s *ast.AlterAsymmetricKeyStatement) jsonN if s.Name != nil { node["Name"] = identifierToJSON(s.Name) } + if s.AttestedBy != nil { + node["AttestedBy"] = scalarExpressionToJSON(s.AttestedBy) + } + if s.Kind != "" { + node["Kind"] = s.Kind + } + if s.EncryptionPassword != nil { + node["EncryptionPassword"] = scalarExpressionToJSON(s.EncryptionPassword) + } + if s.DecryptionPassword != nil { + node["DecryptionPassword"] = scalarExpressionToJSON(s.DecryptionPassword) + } return node } @@ -7824,6 +8880,42 @@ func alterFulltextCatalogStatementToJSON(s *ast.AlterFulltextCatalogStatement) j return node } +func createFullTextCatalogStatementToJSON(s *ast.CreateFullTextCatalogStatement) jsonNode { + node := jsonNode{ + "$type": "CreateFullTextCatalogStatement", + "IsDefault": s.IsDefault, + } + if s.FileGroup != nil { + node["FileGroup"] = identifierToJSON(s.FileGroup) + } + if s.Path != nil { + node["Path"] = scalarExpressionToJSON(s.Path) + } + if s.Owner != nil { + node["Owner"] = identifierToJSON(s.Owner) + } + if s.Name != nil { + node["Name"] = identifierToJSON(s.Name) + } + if len(s.Options) > 0 { + opts := make([]jsonNode, len(s.Options)) + for i, opt := range s.Options { + optNode := jsonNode{ + "$type": "OnOffFullTextCatalogOption", + } + if opt.OptionState != "" { + optNode["OptionState"] = opt.OptionState + } + if opt.OptionKind != "" { + optNode["OptionKind"] = opt.OptionKind + } + opts[i] = optNode + } + node["Options"] = opts + } + return node +} + func alterFulltextIndexStatementToJSON(s *ast.AlterFulltextIndexStatement) jsonNode { node := jsonNode{ "$type": "AlterFulltextIndexStatement", @@ -7892,10 +8984,13 @@ func createDatabaseStatementToJSON(s *ast.CreateDatabaseStatement) jsonNode { opts[i] = createDatabaseOptionToJSON(opt) } node["Options"] = opts - // Only output AttachMode when there are options - if s.AttachMode != "" { - node["AttachMode"] = s.AttachMode - } + } + // AttachMode is output 
when there are Options or CopyOf + if (len(s.Options) > 0 || s.CopyOf != nil) && s.AttachMode != "" { + node["AttachMode"] = s.AttachMode + } + if s.CopyOf != nil { + node["CopyOf"] = multiPartIdentifierToJSON(s.CopyOf) } return node } @@ -7917,6 +9012,31 @@ func createDatabaseOptionToJSON(opt ast.CreateDatabaseOption) jsonNode { node["Value"] = identifierToJSON(o.Value) } return node + case *ast.MaxSizeDatabaseOption: + node := jsonNode{ + "$type": "MaxSizeDatabaseOption", + } + if o.MaxSize != nil { + node["MaxSize"] = scalarExpressionToJSON(o.MaxSize) + } + if o.Units != "" { + node["Units"] = o.Units + } + if o.OptionKind != "" { + node["OptionKind"] = o.OptionKind + } + return node + case *ast.LiteralDatabaseOption: + node := jsonNode{ + "$type": "LiteralDatabaseOption", + } + if o.Value != nil { + node["Value"] = scalarExpressionToJSON(o.Value) + } + if o.OptionKind != "" { + node["OptionKind"] = o.OptionKind + } + return node default: return jsonNode{"$type": "CreateDatabaseOption"} } @@ -7952,9 +9072,72 @@ func createAsymmetricKeyStatementToJSON(s *ast.CreateAsymmetricKeyStatement) jso if s.Name != nil { node["Name"] = identifierToJSON(s.Name) } + if s.KeySource != nil { + node["KeySource"] = encryptionSourceToJSON(s.KeySource) + } + if s.EncryptionAlgorithm != "" { + node["EncryptionAlgorithm"] = s.EncryptionAlgorithm + } + if s.Password != nil { + node["Password"] = scalarExpressionToJSON(s.Password) + } + return node +} + +func encryptionSourceToJSON(source ast.EncryptionSource) interface{} { + switch s := source.(type) { + case *ast.ProviderEncryptionSource: + return providerEncryptionSourceToJSON(s) + default: + return nil + } +} + +func providerEncryptionSourceToJSON(s *ast.ProviderEncryptionSource) jsonNode { + node := jsonNode{ + "$type": "ProviderEncryptionSource", + } + if s.Name != nil { + node["Name"] = identifierToJSON(s.Name) + } + if len(s.KeyOptions) > 0 { + options := make([]interface{}, len(s.KeyOptions)) + for i, opt := range s.KeyOptions 
{ + options[i] = keyOptionToJSON(opt) + } + node["KeyOptions"] = options + } return node } +func keyOptionToJSON(opt ast.KeyOption) interface{} { + switch o := opt.(type) { + case *ast.AlgorithmKeyOption: + return jsonNode{ + "$type": "AlgorithmKeyOption", + "Algorithm": o.Algorithm, + "OptionKind": o.OptionKind, + } + case *ast.ProviderKeyNameKeyOption: + node := jsonNode{ + "$type": "ProviderKeyNameKeyOption", + "OptionKind": o.OptionKind, + } + if o.KeyName != nil { + node["KeyName"] = scalarExpressionToJSON(o.KeyName) + } + return node + case *ast.CreationDispositionKeyOption: + return jsonNode{ + "$type": "CreationDispositionKeyOption", + "IsCreateNew": o.IsCreateNew, + "OptionKind": o.OptionKind, + } + default: + return nil + } +} + func createSymmetricKeyStatementToJSON(s *ast.CreateSymmetricKeyStatement) jsonNode { node := jsonNode{ "$type": "CreateSymmetricKeyStatement", @@ -8169,6 +9352,48 @@ func createTypeStatementToJSON(s *ast.CreateTypeStatement) jsonNode { return node } +func createTypeUddtStatementToJSON(s *ast.CreateTypeUddtStatement) jsonNode { + node := jsonNode{ + "$type": "CreateTypeUddtStatement", + } + if s.DataType != nil { + node["DataType"] = dataTypeReferenceToJSON(s.DataType) + } + if s.NullableConstraint != nil { + node["NullableConstraint"] = nullableConstraintToJSON(s.NullableConstraint) + } + if s.Name != nil { + node["Name"] = schemaObjectNameToJSON(s.Name) + } + return node +} + +func createTypeUdtStatementToJSON(s *ast.CreateTypeUdtStatement) jsonNode { + node := jsonNode{ + "$type": "CreateTypeUdtStatement", + } + if s.AssemblyName != nil { + node["AssemblyName"] = assemblyNameToJSON(s.AssemblyName) + } + if s.Name != nil { + node["Name"] = schemaObjectNameToJSON(s.Name) + } + return node +} + +func createTypeTableStatementToJSON(s *ast.CreateTypeTableStatement) jsonNode { + node := jsonNode{ + "$type": "CreateTypeTableStatement", + } + if s.Definition != nil { + node["Definition"] = tableDefinitionToJSON(s.Definition) + } + if 
s.Name != nil { + node["Name"] = schemaObjectNameToJSON(s.Name) + } + return node +} + func createXmlIndexStatementToJSON(s *ast.CreateXmlIndexStatement) jsonNode { node := jsonNode{ "$type": "CreateXmlIndexStatement", @@ -8583,9 +9808,22 @@ func alterExternalLibraryStatementToJSON(s *ast.AlterExternalLibraryStatement) j node := jsonNode{ "$type": "AlterExternalLibraryStatement", } + if s.Owner != nil { + node["Owner"] = identifierToJSON(s.Owner) + } if s.Name != nil { node["Name"] = identifierToJSON(s.Name) } + if s.Language != nil { + node["Language"] = stringLiteralToJSON(s.Language) + } + if len(s.ExternalLibraryFiles) > 0 { + files := make([]jsonNode, len(s.ExternalLibraryFiles)) + for i, f := range s.ExternalLibraryFiles { + files[i] = externalLibraryFileOptionToJSON(f) + } + node["ExternalLibraryFiles"] = files + } return node } diff --git a/parser/parse_ddl.go b/parser/parse_ddl.go index 273cf36d..bf19646e 100644 --- a/parser/parse_ddl.go +++ b/parser/parse_ddl.go @@ -110,6 +110,8 @@ func (p *Parser) parseDropStatement() (ast.Statement, error) { return p.parseDropAssemblyStatement() case "CRYPTOGRAPHIC": return p.parseDropCryptographicProviderStatement() + case "ASYMMETRIC": + return p.parseDropAsymmetricKeyStatement() } return nil, fmt.Errorf("unexpected token after DROP: %s", p.curTok.Literal) @@ -652,6 +654,50 @@ func (p *Parser) parseDropAssemblyStatement() (*ast.DropAssemblyStatement, error return stmt, nil } +func (p *Parser) parseDropAsymmetricKeyStatement() (*ast.DropAsymmetricKeyStatement, error) { + // Consume ASYMMETRIC + p.nextToken() + + // Expect KEY + if strings.ToUpper(p.curTok.Literal) == "KEY" { + p.nextToken() + } + + stmt := &ast.DropAsymmetricKeyStatement{} + + // Check for IF EXISTS + if p.curTok.Type == TokenIf { + p.nextToken() + if strings.ToUpper(p.curTok.Literal) != "EXISTS" { + return nil, fmt.Errorf("expected EXISTS after IF, got %s", p.curTok.Literal) + } + p.nextToken() + stmt.IsIfExists = true + } + + // Parse key name + 
stmt.Name = p.parseIdentifier() + + // Check for REMOVE PROVIDER KEY + if strings.ToUpper(p.curTok.Literal) == "REMOVE" { + p.nextToken() // consume REMOVE + if strings.ToUpper(p.curTok.Literal) == "PROVIDER" { + p.nextToken() // consume PROVIDER + if strings.ToUpper(p.curTok.Literal) == "KEY" { + p.nextToken() // consume KEY + } + stmt.RemoveProviderKey = true + } + } + + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + + return stmt, nil +} + func (p *Parser) parseDropDatabaseStatement() (ast.Statement, error) { // Consume DATABASE p.nextToken() @@ -1577,6 +1623,29 @@ func (p *Parser) parseAlterDatabaseModifyStatement(dbName *ast.Identifier) (ast. // Consume MODIFY p.nextToken() + // Check for Azure-style MODIFY (options) syntax + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + createOpts, err := p.parseAzureDatabaseOptions() + if err != nil { + return nil, err + } + if p.curTok.Type == TokenRParen { + p.nextToken() // consume ) + } + // Convert CreateDatabaseOption to DatabaseOption + opts := make([]ast.DatabaseOption, len(createOpts)) + for i, o := range createOpts { + opts[i] = o.(ast.DatabaseOption) + } + stmt := &ast.AlterDatabaseSetStatement{ + DatabaseName: dbName, + Options: opts, + } + p.skipToEndOfStatement() + return stmt, nil + } + switch strings.ToUpper(p.curTok.Literal) { case "FILE": p.nextToken() // consume FILE @@ -1776,14 +1845,17 @@ func (p *Parser) parseAlterServerConfigurationStatement() (ast.Statement, error) // Consume SERVER p.nextToken() - // Check if it's ALTER SERVER ROLE or ALTER SERVER CONFIGURATION - if strings.ToUpper(p.curTok.Literal) == "ROLE" { + // Check if it's ALTER SERVER ROLE, ALTER SERVER AUDIT, or ALTER SERVER CONFIGURATION + switch strings.ToUpper(p.curTok.Literal) { + case "ROLE": return p.parseAlterServerRoleStatement() + case "AUDIT": + return p.parseAlterServerAuditStatement() } // Expect CONFIGURATION if strings.ToUpper(p.curTok.Literal) != "CONFIGURATION" { - 
return nil, fmt.Errorf("expected CONFIGURATION or ROLE after SERVER, got %s", p.curTok.Literal) + return nil, fmt.Errorf("expected CONFIGURATION, ROLE, or AUDIT after SERVER, got %s", p.curTok.Literal) } p.nextToken() @@ -3109,6 +3181,81 @@ func (p *Parser) parseAlterServerRoleStatement() (*ast.AlterServerRoleStatement, return stmt, nil } +func (p *Parser) parseAlterServerAuditStatement() (*ast.AlterServerAuditStatement, error) { + // AUDIT keyword should be current token, consume it + p.nextToken() + + stmt := &ast.AlterServerAuditStatement{} + + // Parse audit name + stmt.AuditName = p.parseIdentifier() + + // Check for REMOVE WHERE + if strings.ToUpper(p.curTok.Literal) == "REMOVE" { + p.nextToken() // consume REMOVE + if strings.ToUpper(p.curTok.Literal) == "WHERE" { + p.nextToken() // consume WHERE + stmt.RemoveWhere = true + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + return stmt, nil + } + return nil, fmt.Errorf("expected WHERE after REMOVE, got %s", p.curTok.Literal) + } + + // Parse TO clause (audit target) + if strings.ToUpper(p.curTok.Literal) == "TO" { + p.nextToken() // consume TO + target, err := p.parseAuditTarget() + if err != nil { + return nil, err + } + stmt.AuditTarget = target + } + + // Parse WITH clause (options) + if strings.ToUpper(p.curTok.Literal) == "WITH" { + p.nextToken() // consume WITH + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + opt, err := p.parseAuditOption() + if err != nil { + return nil, err + } + stmt.Options = append(stmt.Options, opt) + if p.curTok.Type == TokenComma { + p.nextToken() + } else { + break + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() // consume ) + } + } + } + + // Parse WHERE clause (predicate) + if strings.ToUpper(p.curTok.Literal) == "WHERE" { + p.nextToken() // consume WHERE + pred, err := p.parseAuditPredicate() + if err != nil { + return nil, err + } + 
stmt.PredicateExpression = pred + } + + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + + return stmt, nil +} + func (p *Parser) parseAlterRemoteServiceBindingStatement() (*ast.AlterRemoteServiceBindingStatement, error) { // Consume REMOTE p.nextToken() @@ -3825,8 +3972,89 @@ func (p *Parser) parseAlterAsymmetricKeyStatement() (*ast.AlterAsymmetricKeyStat // Parse key name stmt.Name = p.parseIdentifier() - // Skip rest of statement - p.skipToEndOfStatement() + // Parse the action + switch strings.ToUpper(p.curTok.Literal) { + case "REMOVE": + p.nextToken() // consume REMOVE + switch strings.ToUpper(p.curTok.Literal) { + case "PRIVATE": + p.nextToken() // consume PRIVATE + if strings.ToUpper(p.curTok.Literal) == "KEY" { + p.nextToken() // consume KEY + } + stmt.Kind = "RemovePrivateKey" + case "ATTESTED": + p.nextToken() // consume ATTESTED + if strings.ToUpper(p.curTok.Literal) == "OPTION" { + p.nextToken() // consume OPTION + } + stmt.Kind = "RemoveAttestedOption" + } + case "ATTESTED": + p.nextToken() // consume ATTESTED + if strings.ToUpper(p.curTok.Literal) == "BY" { + p.nextToken() // consume BY + } + attestedBy, _ := p.parseStringLiteral() + stmt.AttestedBy = attestedBy + stmt.Kind = "AttestedBy" + case "WITH": + p.nextToken() // consume WITH + if strings.ToUpper(p.curTok.Literal) == "PRIVATE" { + p.nextToken() // consume PRIVATE + if strings.ToUpper(p.curTok.Literal) == "KEY" { + p.nextToken() // consume KEY + } + } + stmt.Kind = "WithPrivateKey" + // Parse (ENCRYPTION BY PASSWORD = '...', DECRYPTION BY PASSWORD = '...') + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + switch strings.ToUpper(p.curTok.Literal) { + case "ENCRYPTION": + p.nextToken() // consume ENCRYPTION + if strings.ToUpper(p.curTok.Literal) == "BY" { + p.nextToken() // consume BY + } + if strings.ToUpper(p.curTok.Literal) == "PASSWORD" { + p.nextToken() // consume 
PASSWORD + } + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + pwd, _ := p.parseStringLiteral() + stmt.EncryptionPassword = pwd + case "DECRYPTION": + p.nextToken() // consume DECRYPTION + if strings.ToUpper(p.curTok.Literal) == "BY" { + p.nextToken() // consume BY + } + if strings.ToUpper(p.curTok.Literal) == "PASSWORD" { + p.nextToken() // consume PASSWORD + } + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + pwd, _ := p.parseStringLiteral() + stmt.DecryptionPassword = pwd + default: + p.nextToken() + } + if p.curTok.Type == TokenComma { + p.nextToken() + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() // consume ) + } + } + } + + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } return stmt, nil } @@ -4342,8 +4570,77 @@ func (p *Parser) parseAlterExternalLibraryStatement() (*ast.AlterExternalLibrary // Parse name stmt.Name = p.parseIdentifier() - // Skip rest of statement - p.skipToEndOfStatement() + // Parse optional AUTHORIZATION clause + if strings.ToUpper(p.curTok.Literal) == "AUTHORIZATION" { + p.nextToken() // consume AUTHORIZATION + stmt.Owner = p.parseIdentifier() + } + + // Parse SET clause + if strings.ToUpper(p.curTok.Literal) == "SET" { + p.nextToken() // consume SET + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + optName := strings.ToUpper(p.curTok.Literal) + p.nextToken() // consume option name + + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + + if optName == "CONTENT" { + content, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.ExternalLibraryFiles = append(stmt.ExternalLibraryFiles, &ast.ExternalLibraryFileOption{ + Content: content, + }) + } + } + + if p.curTok.Type == TokenComma { + p.nextToken() + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() + } + } + } + + // Parse WITH clause + if p.curTok.Type == TokenWith { 
+ p.nextToken() // consume WITH + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + optName := strings.ToUpper(p.curTok.Literal) + p.nextToken() // consume option name + + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + + if optName == "LANGUAGE" && p.curTok.Type == TokenString { + strLit, _ := p.parseStringLiteral() + stmt.Language = strLit + } + } + + if p.curTok.Type == TokenComma { + p.nextToken() + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() + } + } + } + + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } return stmt, nil } @@ -4567,8 +4864,8 @@ func (p *Parser) parseSequenceOption() (interface{}, error) { } case "AS": p.nextToken() - // Parse data type - dataType, err := p.parseDataType() + // Parse data type - use parseDataTypeReference to preserve UserDataTypeReference + dataType, err := p.parseDataTypeReference() if err != nil { return nil, err } diff --git a/parser/parse_dml.go b/parser/parse_dml.go index 4ad897d3..fa3a0328 100644 --- a/parser/parse_dml.go +++ b/parser/parse_dml.go @@ -23,6 +23,15 @@ func (p *Parser) parseInsertStatement() (ast.Statement, error) { }, } + // Check for TOP clause + if p.curTok.Type == TokenTop { + top, err := p.parseTopRowFilter() + if err != nil { + return nil, err + } + stmt.InsertSpecification.TopRowFilter = top + } + // Check for INTO or OVER if p.curTok.Type == TokenInto { stmt.InsertSpecification.InsertOption = "Into" @@ -48,6 +57,20 @@ func (p *Parser) parseInsertStatement() (ast.Statement, error) { stmt.InsertSpecification.Columns = cols } + // Parse OUTPUT clauses (can have OUTPUT INTO followed by OUTPUT) + for p.curTok.Type == TokenIdent && strings.ToUpper(p.curTok.Literal) == "OUTPUT" { + outputClause, outputIntoClause, err := p.parseOutputClause() + if err != nil { + return nil, err + } + if outputIntoClause != nil { + stmt.InsertSpecification.OutputIntoClause = 
outputIntoClause + } + if outputClause != nil { + stmt.InsertSpecification.OutputClause = outputClause + } + } + // Parse insert source source, err := p.parseInsertSource() if err != nil { @@ -402,7 +425,7 @@ func (p *Parser) parseFunctionParameters() ([]ast.ScalarExpression, error) { return params, nil } -func (p *Parser) parseTableHints() ([]*ast.TableHint, error) { +func (p *Parser) parseTableHints() ([]ast.TableHintType, error) { // Consume WITH p.nextToken() @@ -411,15 +434,19 @@ func (p *Parser) parseTableHints() ([]*ast.TableHint, error) { } p.nextToken() - var hints []*ast.TableHint + var hints []ast.TableHintType for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { - if p.curTok.Type == TokenIdent || p.curTok.Type == TokenHoldlock || p.curTok.Type == TokenNowait { - hintKind := convertTableHintKind(p.curTok.Literal) - hints = append(hints, &ast.TableHint{HintKind: hintKind}) - p.nextToken() + hint, err := p.parseTableHint() + if err != nil { + return nil, err + } + if hint != nil { + hints = append(hints, hint) } if p.curTok.Type == TokenComma { p.nextToken() + } else if p.curTok.Type != TokenRParen { + break } } @@ -1664,3 +1691,87 @@ func (p *Parser) parseUpdateStatisticsStatementContinued() (*ast.UpdateStatistic return stmt, nil } +// parseOutputClause parses an OUTPUT clause (with optional INTO). +// Returns (outputClause, outputIntoClause, error). +// If INTO is present, outputIntoClause is set; otherwise outputClause is set. 
+func (p *Parser) parseOutputClause() (*ast.OutputClause, *ast.OutputIntoClause, error) { + // Consume OUTPUT + p.nextToken() + + // Parse select columns + var selectColumns []ast.SelectElement + for { + elem, err := p.parseSelectElement() + if err != nil { + return nil, nil, err + } + selectColumns = append(selectColumns, elem) + + if p.curTok.Type == TokenComma { + p.nextToken() + } else { + break + } + } + + // Check for INTO + if p.curTok.Type == TokenInto { + p.nextToken() // consume INTO + + // Parse target table (variable or table name) + var intoTable ast.TableReference + if p.curTok.Type == TokenIdent && strings.HasPrefix(p.curTok.Literal, "@") { + name := p.curTok.Literal + p.nextToken() + intoTable = &ast.VariableTableReference{ + Variable: &ast.VariableReference{Name: name}, + ForPath: false, + } + } else { + son, err := p.parseSchemaObjectName() + if err != nil { + return nil, nil, err + } + intoTable = &ast.NamedTableReference{ + SchemaObject: son, + ForPath: false, + } + } + + // Parse optional column list + var intoColumns []*ast.ColumnReferenceExpression + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + colRef := &ast.ColumnReferenceExpression{ + ColumnType: "Regular", + MultiPartIdentifier: &ast.MultiPartIdentifier{ + Identifiers: []*ast.Identifier{p.parseIdentifier()}, + }, + } + colRef.MultiPartIdentifier.Count = len(colRef.MultiPartIdentifier.Identifiers) + intoColumns = append(intoColumns, colRef) + + if p.curTok.Type == TokenComma { + p.nextToken() + } else { + break + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() + } + } + + return nil, &ast.OutputIntoClause{ + SelectColumns: selectColumns, + IntoTable: intoTable, + IntoTableColumns: intoColumns, + }, nil + } + + return &ast.OutputClause{ + SelectColumns: selectColumns, + }, nil, nil +} + diff --git a/parser/parse_select.go b/parser/parse_select.go index 7ca4384b..7ab0107d 100644 --- 
a/parser/parse_select.go +++ b/parser/parse_select.go @@ -83,12 +83,13 @@ func (p *Parser) parseSelectStatement() (*ast.SelectStatement, error) { stmt := &ast.SelectStatement{} // Parse query expression (handles UNION, parens, etc.) - qe, into, err := p.parseQueryExpressionWithInto() + qe, into, on, err := p.parseQueryExpressionWithInto() if err != nil { return nil, err } stmt.QueryExpression = qe stmt.Into = into + stmt.On = on // Parse optional OPTION clause if p.curTok.Type == TokenOption { @@ -108,15 +109,15 @@ func (p *Parser) parseSelectStatement() (*ast.SelectStatement, error) { } func (p *Parser) parseQueryExpression() (ast.QueryExpression, error) { - qe, _, err := p.parseQueryExpressionWithInto() + qe, _, _, err := p.parseQueryExpressionWithInto() return qe, err } -func (p *Parser) parseQueryExpressionWithInto() (ast.QueryExpression, *ast.SchemaObjectName, error) { +func (p *Parser) parseQueryExpressionWithInto() (ast.QueryExpression, *ast.SchemaObjectName, *ast.Identifier, error) { // Parse primary query expression (could be SELECT or parenthesized) - left, into, err := p.parsePrimaryQueryExpression() + left, into, on, err := p.parsePrimaryQueryExpression() if err != nil { - return nil, nil, err + return nil, nil, nil, err } // Track if we have any binary operations @@ -144,14 +145,15 @@ func (p *Parser) parseQueryExpressionWithInto() (ast.QueryExpression, *ast.Schem } // Parse the right side - right, rightInto, err := p.parsePrimaryQueryExpression() + right, rightInto, rightOn, err := p.parsePrimaryQueryExpression() if err != nil { - return nil, nil, err + return nil, nil, nil, err } // INTO can only appear in the first query of a UNION if rightInto != nil && into == nil { into = rightInto + on = rightOn } bqe := &ast.BinaryQueryExpression{ @@ -168,7 +170,7 @@ func (p *Parser) parseQueryExpressionWithInto() (ast.QueryExpression, *ast.Schem if p.curTok.Type == TokenOrder { obc, err := p.parseOrderByClause() if err != nil { - return nil, nil, err + return 
nil, nil, nil, err } if hasBinaryOp { @@ -184,39 +186,45 @@ func (p *Parser) parseQueryExpressionWithInto() (ast.QueryExpression, *ast.Schem } } - return left, into, nil + return left, into, on, nil } -func (p *Parser) parsePrimaryQueryExpression() (ast.QueryExpression, *ast.SchemaObjectName, error) { +func (p *Parser) parsePrimaryQueryExpression() (ast.QueryExpression, *ast.SchemaObjectName, *ast.Identifier, error) { if p.curTok.Type == TokenLParen { p.nextToken() // consume ( - qe, into, err := p.parseQueryExpressionWithInto() + qe, into, on, err := p.parseQueryExpressionWithInto() if err != nil { - return nil, nil, err + return nil, nil, nil, err } if p.curTok.Type != TokenRParen { - return nil, nil, fmt.Errorf("expected ), got %s", p.curTok.Literal) + return nil, nil, nil, fmt.Errorf("expected ), got %s", p.curTok.Literal) } p.nextToken() // consume ) - return &ast.QueryParenthesisExpression{QueryExpression: qe}, into, nil + return &ast.QueryParenthesisExpression{QueryExpression: qe}, into, on, nil } return p.parseQuerySpecificationWithInto() } -func (p *Parser) parseQuerySpecificationWithInto() (*ast.QuerySpecification, *ast.SchemaObjectName, error) { +func (p *Parser) parseQuerySpecificationWithInto() (*ast.QuerySpecification, *ast.SchemaObjectName, *ast.Identifier, error) { qs, err := p.parseQuerySpecificationCore() if err != nil { - return nil, nil, err + return nil, nil, nil, err } // Check for INTO clause after SELECT elements, before FROM var into *ast.SchemaObjectName + var on *ast.Identifier if p.curTok.Type == TokenInto { p.nextToken() // consume INTO into, err = p.parseSchemaObjectName() if err != nil { - return nil, nil, err + return nil, nil, nil, err + } + // Check for ON filegroup clause + if strings.ToUpper(p.curTok.Literal) == "ON" { + p.nextToken() // consume ON + on = p.parseIdentifier() } } @@ -224,7 +232,7 @@ func (p *Parser) parseQuerySpecificationWithInto() (*ast.QuerySpecification, *as if p.curTok.Type == TokenFrom { fromClause, err := 
p.parseFromClause() if err != nil { - return nil, nil, err + return nil, nil, nil, err } qs.FromClause = fromClause } @@ -233,7 +241,7 @@ func (p *Parser) parseQuerySpecificationWithInto() (*ast.QuerySpecification, *as if p.curTok.Type == TokenWhere { whereClause, err := p.parseWhereClause() if err != nil { - return nil, nil, err + return nil, nil, nil, err } qs.WhereClause = whereClause } @@ -242,7 +250,7 @@ func (p *Parser) parseQuerySpecificationWithInto() (*ast.QuerySpecification, *as if p.curTok.Type == TokenGroup { groupByClause, err := p.parseGroupByClause() if err != nil { - return nil, nil, err + return nil, nil, nil, err } qs.GroupByClause = groupByClause } @@ -251,7 +259,7 @@ func (p *Parser) parseQuerySpecificationWithInto() (*ast.QuerySpecification, *as if p.curTok.Type == TokenHaving { havingClause, err := p.parseHavingClause() if err != nil { - return nil, nil, err + return nil, nil, nil, err } qs.HavingClause = havingClause } @@ -259,7 +267,7 @@ func (p *Parser) parseQuerySpecificationWithInto() (*ast.QuerySpecification, *as // Note: ORDER BY is parsed at the top level in parseQueryExpressionWithInto // to correctly handle UNION/EXCEPT/INTERSECT cases - return qs, into, nil + return qs, into, on, nil } func (p *Parser) parseQuerySpecificationCore() (*ast.QuerySpecification, error) { @@ -310,15 +318,30 @@ func (p *Parser) parseTopRowFilter() (*ast.TopRowFilter, error) { // Check for parenthesized expression if p.curTok.Type == TokenLParen { p.nextToken() // consume ( - expr, err := p.parseScalarExpression() - if err != nil { - return nil, err - } - top.Expression = expr - if p.curTok.Type != TokenRParen { - return nil, fmt.Errorf("expected ), got %s", p.curTok.Literal) + + // Check for subquery (SELECT ...) 
+ if p.curTok.Type == TokenSelect { + qe, err := p.parseQueryExpression() + if err != nil { + return nil, err + } + if p.curTok.Type != TokenRParen { + return nil, fmt.Errorf("expected ), got %s", p.curTok.Literal) + } + p.nextToken() + top.Expression = &ast.ScalarSubquery{QueryExpression: qe} + } else { + expr, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + // Wrap in ParenthesisExpression + top.Expression = &ast.ParenthesisExpression{Expression: expr} + if p.curTok.Type != TokenRParen { + return nil, fmt.Errorf("expected ), got %s", p.curTok.Literal) + } + p.nextToken() // consume ) } - p.nextToken() // consume ) } else { // Parse literal expression expr, err := p.parsePrimaryExpression() @@ -472,6 +495,14 @@ func (p *Parser) parseIdentifier() *ast.Identifier { if len(literal) >= 2 && literal[0] == '[' && literal[len(literal)-1] == ']' { quoteType = "SquareBracket" literal = literal[1 : len(literal)-1] + // Unescape ]] to ] + literal = strings.ReplaceAll(literal, "]]", "]") + } else if len(literal) >= 2 && literal[0] == '"' && literal[len(literal)-1] == '"' { + // Handle double-quoted identifiers + quoteType = "DoubleQuote" + literal = literal[1 : len(literal)-1] + // Unescape "" to " + literal = strings.ReplaceAll(literal, "\"\"", "\"") } id := &ast.Identifier{ @@ -643,6 +674,20 @@ func (p *Parser) parsePrimaryExpression() (ast.ScalarExpression, error) { p.nextToken() // consume N return p.parseNationalStringLiteral() } + // Check for CAST/CONVERT special functions + upper := strings.ToUpper(p.curTok.Literal) + if upper == "CAST" && p.peekTok.Type == TokenLParen { + return p.parseCastCall() + } + if upper == "CONVERT" && p.peekTok.Type == TokenLParen { + return p.parseConvertCall() + } + if upper == "TRY_CAST" && p.peekTok.Type == TokenLParen { + return p.parseTryCastCall() + } + if upper == "TRY_CONVERT" && p.peekTok.Type == TokenLParen { + return p.parseTryConvertCall() + } return p.parseColumnReferenceOrFunctionCall() case 
TokenNumber: val := p.curTok.Literal @@ -689,6 +734,10 @@ func (p *Parser) parsePrimaryExpression() (ast.ScalarExpression, error) { return p.parsePostExpressionAccess(&ast.ParenthesisExpression{Expression: expr}) case TokenCase: return p.parseCaseExpression() + case TokenStar: + // Wildcard column reference (e.g., * in count(*)) + p.nextToken() + return &ast.ColumnReferenceExpression{ColumnType: "Wildcard"}, nil default: return nil, fmt.Errorf("unexpected token in expression: %s", p.curTok.Literal) } @@ -1383,15 +1432,138 @@ func (p *Parser) parseNamedTableReference() (*ast.NamedTableReference, error) { } else if p.curTok.Type == TokenIdent { // Could be an alias without AS, but need to be careful not to consume keywords upper := strings.ToUpper(p.curTok.Literal) - if upper != "WHERE" && upper != "GROUP" && upper != "HAVING" && upper != "ORDER" && upper != "OPTION" && upper != "GO" { + if upper != "WHERE" && upper != "GROUP" && upper != "HAVING" && upper != "ORDER" && upper != "OPTION" && upper != "GO" && upper != "WITH" && upper != "ON" && upper != "JOIN" && upper != "INNER" && upper != "LEFT" && upper != "RIGHT" && upper != "FULL" && upper != "CROSS" && upper != "OUTER" { ref.Alias = &ast.Identifier{Value: p.curTok.Literal, QuoteType: "NotQuoted"} p.nextToken() } } + // Parse optional table hints WITH (hint, hint, ...) 
+ if p.curTok.Type == TokenWith { + p.nextToken() // consume WITH + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + hint, err := p.parseTableHint() + if err != nil { + return nil, err + } + if hint != nil { + ref.TableHints = append(ref.TableHints, hint) + } + if p.curTok.Type == TokenComma { + p.nextToken() + } else if p.curTok.Type != TokenRParen { + break + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() + } + } + } + return ref, nil } +// parseTableHint parses a single table hint +func (p *Parser) parseTableHint() (ast.TableHintType, error) { + hintName := strings.ToUpper(p.curTok.Literal) + p.nextToken() // consume hint name + + // INDEX hint with values + if hintName == "INDEX" { + hint := &ast.IndexTableHint{ + HintKind: "Index", + } + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + var iov *ast.IdentifierOrValueExpression + if p.curTok.Type == TokenNumber { + iov = &ast.IdentifierOrValueExpression{ + Value: p.curTok.Literal, + ValueExpression: &ast.IntegerLiteral{ + LiteralType: "Integer", + Value: p.curTok.Literal, + }, + } + p.nextToken() + } else if p.curTok.Type == TokenIdent { + iov = &ast.IdentifierOrValueExpression{ + Value: p.curTok.Literal, + Identifier: &ast.Identifier{ + Value: p.curTok.Literal, + QuoteType: "NotQuoted", + }, + } + p.nextToken() + } + if iov != nil { + hint.IndexValues = append(hint.IndexValues, iov) + } + if p.curTok.Type == TokenComma { + p.nextToken() + } else if p.curTok.Type != TokenRParen { + break + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() + } + } + return hint, nil + } + + // Map hint names to HintKind + hintKind := getTableHintKind(hintName) + if hintKind == "" { + return nil, nil // Unknown hint + } + + return &ast.TableHint{ + HintKind: hintKind, + }, nil +} + +// getTableHintKind maps SQL hint names to their AST HintKind values 
// tableHintKindByName maps an upper-cased T-SQL table hint name to the
// ScriptDom-style TableHintKind string used in the AST. Names absent from
// the map are unknown hints and resolve to "".
var tableHintKindByName = map[string]string{
	"HOLDLOCK":        "HoldLock",
	"NOLOCK":          "NoLock",
	"PAGLOCK":         "PagLock",
	"READCOMMITTED":   "ReadCommitted",
	"READPAST":        "ReadPast",
	"READUNCOMMITTED": "ReadUncommitted",
	"REPEATABLEREAD":  "RepeatableRead",
	"ROWLOCK":         "Rowlock",
	"SERIALIZABLE":    "Serializable",
	"SNAPSHOT":        "Snapshot",
	"TABLOCK":         "TabLock",
	"TABLOCKX":        "TabLockX",
	"UPDLOCK":         "UpdLock",
	"XLOCK":           "XLock",
	"NOWAIT":          "NoWait",
	// Additional hints from the ScriptDom TableHintKind enumeration that
	// were previously unmapped and therefore silently dropped.
	"NOEXPAND":           "NoExpand",
	"READCOMMITTEDLOCK":  "ReadCommittedLock",
	"FORCESCAN":          "ForceScan",
	"FORCESEEK":          "ForceSeek", // NOTE: the parameterized FORCESEEK(...) form is not parsed here
	"KEEPIDENTITY":       "KeepIdentity",
	"KEEPDEFAULTS":       "KeepDefaults",
	"IGNORE_CONSTRAINTS": "IgnoreConstraints",
	"IGNORE_TRIGGERS":    "IgnoreTriggers",
}

// getTableHintKind maps an upper-cased SQL hint name to its AST HintKind
// value. It returns the empty string for unrecognized names; callers treat
// that as "unknown hint, skip".
func getTableHintKind(name string) string {
	return tableHintKindByName[name]
}
+ if p.curTok.Type == TokenOrder || p.curTok.Type == TokenGroup { + hintKind := convertHintKind(p.curTok.Literal) + firstWord := strings.ToUpper(p.curTok.Literal) + p.nextToken() + + // Check for two-word hints like ORDER GROUP + if (firstWord == "ORDER" || firstWord == "HASH" || firstWord == "MERGE" || + firstWord == "CONCAT" || firstWord == "LOOP" || firstWord == "FORCE") && + (p.curTok.Type == TokenIdent || p.curTok.Type == TokenGroup) { + secondWord := strings.ToUpper(p.curTok.Literal) + if secondWord == "GROUP" || secondWord == "JOIN" || secondWord == "UNION" || + secondWord == "ORDER" { + hintKind = hintKind + convertHintKind(p.curTok.Literal) + p.nextToken() + } + } + return &ast.OptimizerHint{HintKind: hintKind}, nil + } + + // Handle TABLE HINT optimizer hint + if p.curTok.Type == TokenTable { + p.nextToken() // consume TABLE + if p.curTok.Type == TokenIdent && strings.ToUpper(p.curTok.Literal) == "HINT" { + p.nextToken() // consume HINT + return p.parseTableHintsOptimizerHint() + } + return &ast.OptimizerHint{HintKind: "Table"}, nil + } + if p.curTok.Type != TokenIdent && p.curTok.Type != TokenLabel { // Skip unknown tokens to avoid infinite loop p.nextToken() @@ -1628,8 +1830,21 @@ func (p *Parser) parseOptimizerHint() (ast.OptimizerHintBase, error) { default: // Handle generic hints hintKind := convertHintKind(p.curTok.Literal) + firstWord := strings.ToUpper(p.curTok.Literal) p.nextToken() + // Check for two-word hints like ORDER GROUP, HASH GROUP, etc. + if (firstWord == "ORDER" || firstWord == "HASH" || firstWord == "MERGE" || + firstWord == "CONCAT" || firstWord == "LOOP" || firstWord == "FORCE") && + p.curTok.Type == TokenIdent { + secondWord := strings.ToUpper(p.curTok.Literal) + if secondWord == "GROUP" || secondWord == "JOIN" || secondWord == "UNION" || + secondWord == "ORDER" { + hintKind = hintKind + convertHintKind(p.curTok.Literal) + p.nextToken() + } + } + // Check if this is a literal hint (LABEL = value, etc.) 
if p.curTok.Type == TokenEquals { p.nextToken() // consume = @@ -1643,6 +1858,53 @@ func (p *Parser) parseOptimizerHint() (ast.OptimizerHintBase, error) { } } +func (p *Parser) parseTableHintsOptimizerHint() (ast.OptimizerHintBase, error) { + hint := &ast.TableHintsOptimizerHint{ + HintKind: "TableHints", + } + + // Expect ( + if p.curTok.Type != TokenLParen { + return nil, fmt.Errorf("expected ( after TABLE HINT, got %s", p.curTok.Literal) + } + p.nextToken() // consume ( + + // Parse object name + objectName, err := p.parseSchemaObjectName() + if err != nil { + return nil, err + } + hint.ObjectName = objectName + + // Expect comma + if p.curTok.Type == TokenComma { + p.nextToken() // consume comma + } + + // Parse table hints + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + if p.curTok.Type == TokenComma { + p.nextToken() + continue + } + + tableHint, err := p.parseTableHint() + if err != nil { + return nil, err + } + if tableHint != nil { + hint.TableHints = append(hint.TableHints, tableHint) + } + } + + // Consume ) + if p.curTok.Type == TokenRParen { + p.nextToken() + } + + return hint, nil +} + func (p *Parser) parseOptimizeForHint() (ast.OptimizerHintBase, error) { hint := &ast.OptimizeForOptimizerHint{ HintKind: "OptimizeFor", @@ -2165,3 +2427,207 @@ func identifiersToSchemaObjectName(identifiers []*ast.Identifier) *ast.SchemaObj // ======================= New Statement Parsing Functions ======================= + +// parseCastCall parses a CAST expression: CAST(expression AS data_type) +func (p *Parser) parseCastCall() (ast.ScalarExpression, error) { + p.nextToken() // consume CAST + if p.curTok.Type != TokenLParen { + return nil, fmt.Errorf("expected ( after CAST, got %s", p.curTok.Literal) + } + p.nextToken() // consume ( + + // Parse the expression + expr, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + + // Expect AS + if p.curTok.Type != TokenAs { + return nil, fmt.Errorf("expected AS in CAST, got %s", 
p.curTok.Literal) + } + p.nextToken() // consume AS + + // Parse the data type + dt, err := p.parseDataTypeReference() + if err != nil { + return nil, err + } + + // Expect ) + if p.curTok.Type != TokenRParen { + return nil, fmt.Errorf("expected ) in CAST, got %s", p.curTok.Literal) + } + p.nextToken() // consume ) + + cast := &ast.CastCall{ + DataType: dt, + Parameter: expr, + } + + // Check for COLLATE clause + if strings.ToUpper(p.curTok.Literal) == "COLLATE" { + p.nextToken() // consume COLLATE + cast.Collation = p.parseIdentifier() + } + + return cast, nil +} + +// parseConvertCall parses a CONVERT expression: CONVERT(data_type, expression [, style]) +func (p *Parser) parseConvertCall() (ast.ScalarExpression, error) { + p.nextToken() // consume CONVERT + if p.curTok.Type != TokenLParen { + return nil, fmt.Errorf("expected ( after CONVERT, got %s", p.curTok.Literal) + } + p.nextToken() // consume ( + + // Parse the data type first + dt, err := p.parseDataTypeReference() + if err != nil { + return nil, err + } + + // Expect comma + if p.curTok.Type != TokenComma { + return nil, fmt.Errorf("expected , in CONVERT, got %s", p.curTok.Literal) + } + p.nextToken() // consume , + + // Parse the expression + expr, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + + convert := &ast.ConvertCall{ + DataType: dt, + Parameter: expr, + } + + // Check for optional style parameter + if p.curTok.Type == TokenComma { + p.nextToken() // consume , + style, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + convert.Style = style + } + + // Expect ) + if p.curTok.Type != TokenRParen { + return nil, fmt.Errorf("expected ) in CONVERT, got %s", p.curTok.Literal) + } + p.nextToken() // consume ) + + // Check for COLLATE clause + if strings.ToUpper(p.curTok.Literal) == "COLLATE" { + p.nextToken() // consume COLLATE + convert.Collation = p.parseIdentifier() + } + + return convert, nil +} + +// parseTryCastCall parses a TRY_CAST expression 
+func (p *Parser) parseTryCastCall() (ast.ScalarExpression, error) { + p.nextToken() // consume TRY_CAST + if p.curTok.Type != TokenLParen { + return nil, fmt.Errorf("expected ( after TRY_CAST, got %s", p.curTok.Literal) + } + p.nextToken() // consume ( + + // Parse the expression + expr, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + + // Expect AS + if p.curTok.Type != TokenAs { + return nil, fmt.Errorf("expected AS in TRY_CAST, got %s", p.curTok.Literal) + } + p.nextToken() // consume AS + + // Parse the data type + dt, err := p.parseDataTypeReference() + if err != nil { + return nil, err + } + + // Expect ) + if p.curTok.Type != TokenRParen { + return nil, fmt.Errorf("expected ) in TRY_CAST, got %s", p.curTok.Literal) + } + p.nextToken() // consume ) + + cast := &ast.TryCastCall{ + DataType: dt, + Parameter: expr, + } + + // Check for COLLATE clause + if strings.ToUpper(p.curTok.Literal) == "COLLATE" { + p.nextToken() // consume COLLATE + cast.Collation = p.parseIdentifier() + } + + return cast, nil +} + +// parseTryConvertCall parses a TRY_CONVERT expression +func (p *Parser) parseTryConvertCall() (ast.ScalarExpression, error) { + p.nextToken() // consume TRY_CONVERT + if p.curTok.Type != TokenLParen { + return nil, fmt.Errorf("expected ( after TRY_CONVERT, got %s", p.curTok.Literal) + } + p.nextToken() // consume ( + + // Parse the data type first + dt, err := p.parseDataTypeReference() + if err != nil { + return nil, err + } + + // Expect comma + if p.curTok.Type != TokenComma { + return nil, fmt.Errorf("expected , in TRY_CONVERT, got %s", p.curTok.Literal) + } + p.nextToken() // consume , + + // Parse the expression + expr, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + + convert := &ast.TryConvertCall{ + DataType: dt, + Parameter: expr, + } + + // Check for optional style parameter + if p.curTok.Type == TokenComma { + p.nextToken() // consume , + style, err := p.parseScalarExpression() + if err != nil 
{ + return nil, err + } + convert.Style = style + } + + // Expect ) + if p.curTok.Type != TokenRParen { + return nil, fmt.Errorf("expected ) in TRY_CONVERT, got %s", p.curTok.Literal) + } + p.nextToken() // consume ) + + // Check for COLLATE clause + if strings.ToUpper(p.curTok.Literal) == "COLLATE" { + p.nextToken() // consume COLLATE + convert.Collation = p.parseIdentifier() + } + + return convert, nil +} diff --git a/parser/parse_statements.go b/parser/parse_statements.go index 22203fb8..a320f765 100644 --- a/parser/parse_statements.go +++ b/parser/parse_statements.go @@ -268,9 +268,42 @@ func (p *Parser) parseTableConstraint() (ast.TableConstraint, error) { constraint.IndexType = &ast.IndexType{IndexTypeKind: "NonClustered"} p.nextToken() } - // Skip the column list + // Parse the column list if p.curTok.Type == TokenLParen { - p.skipParenthesizedContent() + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + colRef := &ast.ColumnReferenceExpression{ + ColumnType: "Regular", + } + // Parse column name + colName := p.parseIdentifier() + colRef.MultiPartIdentifier = &ast.MultiPartIdentifier{ + Identifiers: []*ast.Identifier{colName}, + Count: 1, + } + // Check for sort order + sortOrder := ast.SortOrderNotSpecified + upperColNext := strings.ToUpper(p.curTok.Literal) + if upperColNext == "ASC" { + sortOrder = ast.SortOrderAscending + p.nextToken() + } else if upperColNext == "DESC" { + sortOrder = ast.SortOrderDescending + p.nextToken() + } + constraint.Columns = append(constraint.Columns, &ast.ColumnWithSortOrder{ + Column: colRef, + SortOrder: sortOrder, + }) + if p.curTok.Type == TokenComma { + p.nextToken() + } else { + break + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() + } } return constraint, nil } else if upperLit == "FOREIGN" { @@ -564,7 +597,7 @@ func (p *Parser) parseDataTypeReference() (ast.DataTypeReference, error) { if p.curTok.Type == TokenIdent && strings.ToUpper(p.curTok.Literal) == "MAX" { 
dt.Parameters = append(dt.Parameters, &ast.MaxLiteral{ LiteralType: "Max", - Value: "MAX", + Value: p.curTok.Literal, }) p.nextToken() } else { @@ -629,6 +662,7 @@ func getSqlDataTypeOption(typeName string) (string, bool) { "ROWVERSION": "Rowversion", "TIMESTAMP": "Timestamp", "CONNECTION": "Connection", + "VECTOR": "Vector", } if mapped, ok := typeMap[strings.ToUpper(typeName)]; ok { return mapped, true @@ -939,6 +973,10 @@ func (p *Parser) parseBeginStatement() (ast.Statement, error) { } return nil, fmt.Errorf("expected TRANSACTION after DISTRIBUTED, got %s", p.curTok.Literal) } + // Check for ATOMIC + if strings.ToUpper(p.curTok.Literal) == "ATOMIC" { + return p.parseBeginAtomicBlockStatement() + } // Fall through to BEGIN...END block fallthrough default: @@ -946,6 +984,124 @@ func (p *Parser) parseBeginStatement() (ast.Statement, error) { } } +func (p *Parser) parseBeginAtomicBlockStatement() (*ast.BeginEndAtomicBlockStatement, error) { + p.nextToken() // consume ATOMIC + + stmt := &ast.BeginEndAtomicBlockStatement{ + StatementList: &ast.StatementList{}, + } + + // Parse WITH clause + if p.curTok.Type == TokenWith { + p.nextToken() // consume WITH + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + } + + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + optName := strings.ToUpper(p.curTok.Literal) + p.nextToken() // consume option name + + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + + switch optName { + case "TRANSACTION": + // TRANSACTION ISOLATION LEVEL = ... 
+ if strings.ToUpper(p.curTok.Literal) == "ISOLATION" { + p.nextToken() // consume ISOLATION + if strings.ToUpper(p.curTok.Literal) == "LEVEL" { + p.nextToken() // consume LEVEL + } + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + } + // Parse the isolation level identifier + opt := &ast.IdentifierAtomicBlockOption{ + OptionKind: "IsolationLevel", + Value: p.parseIdentifier(), + } + stmt.Options = append(stmt.Options, opt) + case "LANGUAGE": + // Parse the language value + if p.curTok.Type == TokenString || p.curTok.Type == TokenNationalString { + value := p.curTok.Literal + isNational := p.curTok.Type == TokenNationalString + // Strip the N prefix and quotes from national strings + if isNational && len(value) >= 3 && (value[0] == 'N' || value[0] == 'n') && value[1] == '\'' { + value = value[2 : len(value)-1] + } else if len(value) >= 2 && value[0] == '\'' { + // Strip quotes from regular strings + value = value[1 : len(value)-1] + } + strLit := &ast.StringLiteral{ + LiteralType: "String", + Value: value, + IsNational: isNational, + IsLargeObject: false, + } + p.nextToken() + opt := &ast.LiteralAtomicBlockOption{ + OptionKind: "Language", + Value: strLit, + } + stmt.Options = append(stmt.Options, opt) + } else { + opt := &ast.IdentifierAtomicBlockOption{ + OptionKind: "Language", + Value: p.parseIdentifier(), + } + stmt.Options = append(stmt.Options, opt) + } + case "DATEFIRST", "DATEFORMAT": + opt := &ast.IdentifierAtomicBlockOption{ + OptionKind: optName, + Value: p.parseIdentifier(), + } + stmt.Options = append(stmt.Options, opt) + default: + // Skip unknown options + if p.curTok.Type == TokenIdent || p.curTok.Type == TokenString { + p.nextToken() + } + } + + if p.curTok.Type == TokenComma { + p.nextToken() + } + } + + if p.curTok.Type == TokenRParen { + p.nextToken() // consume ) + } + } + + // Parse statements until END + for p.curTok.Type != TokenEnd && p.curTok.Type != TokenEOF { + s, err := p.parseStatement() + if err != nil { + return 
nil, err + } + if s != nil { + stmt.StatementList.Statements = append(stmt.StatementList.Statements, s) + } + } + + // Consume END + if p.curTok.Type == TokenEnd { + p.nextToken() + } + + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + + return stmt, nil +} + func (p *Parser) parseBeginTransactionStatementContinued(distributed bool) (*ast.BeginTransactionStatement, error) { // TRANSACTION or TRAN already consumed by caller p.nextToken() @@ -1247,7 +1403,16 @@ func (p *Parser) parseCreateStatement() (ast.Statement, error) { case "SPATIAL": return p.parseCreateSpatialIndexStatement() case "SERVER": - return p.parseCreateServerRoleStatement() + // Check if it's SERVER ROLE or SERVER AUDIT + p.nextToken() // consume SERVER + switch strings.ToUpper(p.curTok.Literal) { + case "ROLE": + return p.parseCreateServerRoleStatementContinued() + case "AUDIT": + return p.parseCreateServerAuditStatement() + default: + return nil, fmt.Errorf("expected ROLE or AUDIT after SERVER, got %s", p.curTok.Literal) + } } // Lenient: skip unknown CREATE statements p.skipToEndOfStatement() @@ -1394,6 +1559,16 @@ func (p *Parser) parseCreateServerRoleStatement() (*ast.CreateServerRoleStatemen } p.nextToken() // consume ROLE + return p.parseCreateServerRoleStatementBody() +} + +func (p *Parser) parseCreateServerRoleStatementContinued() (*ast.CreateServerRoleStatement, error) { + // ROLE keyword should be current token, consume it + p.nextToken() + return p.parseCreateServerRoleStatementBody() +} + +func (p *Parser) parseCreateServerRoleStatementBody() (*ast.CreateServerRoleStatement, error) { stmt := &ast.CreateServerRoleStatement{} // Parse role name @@ -1413,6 +1588,304 @@ func (p *Parser) parseCreateServerRoleStatement() (*ast.CreateServerRoleStatemen return stmt, nil } +func (p *Parser) parseCreateServerAuditStatement() (*ast.CreateServerAuditStatement, error) { + // AUDIT keyword should be current token, consume it + p.nextToken() + + stmt := 
&ast.CreateServerAuditStatement{} + + // Parse audit name + stmt.AuditName = p.parseIdentifier() + + // Parse TO clause (audit target) + if strings.ToUpper(p.curTok.Literal) == "TO" { + p.nextToken() // consume TO + target, err := p.parseAuditTarget() + if err != nil { + return nil, err + } + stmt.AuditTarget = target + } + + // Parse WITH clause (options) + if strings.ToUpper(p.curTok.Literal) == "WITH" { + p.nextToken() // consume WITH + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + opt, err := p.parseAuditOption() + if err != nil { + return nil, err + } + stmt.Options = append(stmt.Options, opt) + if p.curTok.Type == TokenComma { + p.nextToken() + } else { + break + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() // consume ) + } + } + } + + // Parse WHERE clause (predicate) + if strings.ToUpper(p.curTok.Literal) == "WHERE" { + p.nextToken() // consume WHERE + pred, err := p.parseAuditPredicate() + if err != nil { + return nil, err + } + stmt.PredicateExpression = pred + } + + // Skip optional semicolon + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + + return stmt, nil +} + +func (p *Parser) parseAuditTarget() (*ast.AuditTarget, error) { + target := &ast.AuditTarget{} + + // Parse target kind (FILE, APPLICATION_LOG, SECURITY_LOG) + switch strings.ToUpper(p.curTok.Literal) { + case "FILE": + target.TargetKind = "File" + case "APPLICATION_LOG": + target.TargetKind = "ApplicationLog" + case "SECURITY_LOG": + target.TargetKind = "SecurityLog" + default: + target.TargetKind = capitalizeFirst(p.curTok.Literal) + } + p.nextToken() + + // Parse target options in parentheses + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + opt, err := p.parseAuditTargetOption() + if err != nil { + return nil, err + } + target.TargetOptions = append(target.TargetOptions, opt) + if p.curTok.Type == 
TokenComma { + p.nextToken() + } else { + break + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() // consume ) + } + } + + return target, nil +} + +func (p *Parser) parseAuditTargetOption() (ast.AuditTargetOption, error) { + optName := strings.ToUpper(p.curTok.Literal) + p.nextToken() + + // Expect = + if p.curTok.Type != TokenEquals { + return nil, fmt.Errorf("expected = after audit target option, got %s", p.curTok.Literal) + } + p.nextToken() + + // Parse value + val, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + + optKind := "" + switch optName { + case "FILEPATH": + optKind = "FilePath" + case "MAX_FILES": + optKind = "MaxFiles" + case "MAX_ROLLOVER_FILES": + optKind = "MaxRolloverFiles" + case "MAXSIZE": + optKind = "MaxSize" + case "RESERVE_DISK_SPACE": + optKind = "ReserveDiskSpace" + default: + optKind = capitalizeFirst(strings.ToLower(optName)) + } + + return &ast.LiteralAuditTargetOption{ + OptionKind: optKind, + Value: val, + }, nil +} + +func (p *Parser) parseAuditOption() (ast.AuditOption, error) { + optName := strings.ToUpper(p.curTok.Literal) + p.nextToken() + + switch optName { + case "ON_FAILURE": + // Expect = + if p.curTok.Type != TokenEquals { + return nil, fmt.Errorf("expected = after ON_FAILURE, got %s", p.curTok.Literal) + } + p.nextToken() + action := "" + switch strings.ToUpper(p.curTok.Literal) { + case "CONTINUE": + action = "Continue" + case "SHUTDOWN": + action = "Shutdown" + case "FAIL_OPERATION": + action = "FailOperation" + default: + action = capitalizeFirst(strings.ToLower(p.curTok.Literal)) + } + p.nextToken() + return &ast.OnFailureAuditOption{ + OptionKind: "OnFailure", + OnFailureAction: action, + }, nil + case "QUEUE_DELAY": + // Expect = + if p.curTok.Type != TokenEquals { + return nil, fmt.Errorf("expected = after QUEUE_DELAY, got %s", p.curTok.Literal) + } + p.nextToken() + val, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + return 
&ast.QueueDelayAuditOption{ + OptionKind: "QueueDelay", + Delay: val, + }, nil + case "STATE": + // Expect = + if p.curTok.Type != TokenEquals { + return nil, fmt.Errorf("expected = after STATE, got %s", p.curTok.Literal) + } + p.nextToken() + value := capitalizeFirst(strings.ToLower(p.curTok.Literal)) + p.nextToken() + return &ast.StateAuditOption{ + OptionKind: "State", + Value: value, + }, nil + case "AUDIT_GUID": + // Expect = + if p.curTok.Type != TokenEquals { + return nil, fmt.Errorf("expected = after AUDIT_GUID, got %s", p.curTok.Literal) + } + p.nextToken() + val, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + return &ast.AuditGuidAuditOption{ + OptionKind: "AuditGuid", + Guid: val, + }, nil + default: + return nil, fmt.Errorf("unknown audit option: %s", optName) + } +} + +func (p *Parser) parseAuditPredicate() (ast.BooleanExpression, error) { + return p.parseAuditBooleanExpression() +} + +func (p *Parser) parseAuditBooleanExpression() (ast.BooleanExpression, error) { + // Parse first operand + left, err := p.parseAuditBooleanPrimary() + if err != nil { + return nil, err + } + + // Check for AND/OR + for strings.ToUpper(p.curTok.Literal) == "AND" || strings.ToUpper(p.curTok.Literal) == "OR" { + op := strings.ToUpper(p.curTok.Literal) + p.nextToken() + right, err := p.parseAuditBooleanPrimary() + if err != nil { + return nil, err + } + var binaryType string + if op == "AND" { + binaryType = "And" + } else { + binaryType = "Or" + } + left = &ast.BooleanBinaryExpression{ + BinaryExpressionType: binaryType, + FirstExpression: left, + SecondExpression: right, + } + } + + return left, nil +} + +func (p *Parser) parseAuditBooleanPrimary() (ast.BooleanExpression, error) { + // For audit predicates, the left side is a SourceDeclaration + // which wraps an EventSessionObjectName + var identifiers []*ast.Identifier + identifiers = append(identifiers, p.parseIdentifier()) + + // Check for multi-part identifier + for p.curTok.Type == 
TokenDot { + p.nextToken() // consume . + identifiers = append(identifiers, p.parseIdentifier()) + } + + sourceDecl := &ast.SourceDeclaration{ + Value: &ast.EventSessionObjectName{ + MultiPartIdentifier: &ast.MultiPartIdentifier{ + Count: len(identifiers), + Identifiers: identifiers, + }, + }, + } + + // Now parse comparison operator and right side + compType := "" + switch p.curTok.Type { + case TokenEquals: + compType = "Equals" + case TokenNotEqual: + compType = "NotEqualToBrackets" + case TokenLessThan: + compType = "LessThan" + case TokenGreaterThan: + compType = "GreaterThan" + case TokenLessOrEqual: + compType = "LessThanOrEqualTo" + case TokenGreaterOrEqual: + compType = "GreaterThanOrEqualTo" + default: + return nil, fmt.Errorf("expected comparison operator, got %s", p.curTok.Literal) + } + p.nextToken() + + // Parse right side + right, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + + return &ast.BooleanComparisonExpression{ + ComparisonType: compType, + FirstExpression: sourceDecl, + SecondExpression: right, + }, nil +} + func (p *Parser) parseCreateContractStatement() (*ast.CreateContractStatement, error) { // Consume CONTRACT p.nextToken() @@ -3799,25 +4272,35 @@ func (p *Parser) parseBackupStatement() (ast.Statement, error) { return p.parseBackupCertificateStatement() } - stmt := &ast.BackupDatabaseStatement{} + // Check for SERVICE MASTER KEY + if strings.ToUpper(p.curTok.Literal) == "SERVICE" { + return p.parseBackupServiceMasterKeyStatement() + } - // Expect DATABASE - if p.curTok.Type != TokenDatabase { - return nil, fmt.Errorf("expected DATABASE after BACKUP, got %s", p.curTok.Literal) + // Check for DATABASE or LOG + isLog := false + if p.curTok.Type == TokenDatabase { + p.nextToken() + } else if strings.ToUpper(p.curTok.Literal) == "LOG" { + isLog = true + p.nextToken() + } else { + return nil, fmt.Errorf("expected DATABASE or LOG after BACKUP, got %s", p.curTok.Literal) } - p.nextToken() // Parse database name + var 
dbName *ast.IdentifierOrValueExpression if p.curTok.Type == TokenIdent && len(p.curTok.Literal) > 0 && p.curTok.Literal[0] == '@' { - stmt.DatabaseName = &ast.IdentifierOrValueExpression{ + dbName = &ast.IdentifierOrValueExpression{ Value: p.curTok.Literal, ValueExpression: &ast.VariableReference{ Name: p.curTok.Literal, }, } + p.nextToken() } else { id := p.parseIdentifier() - stmt.DatabaseName = &ast.IdentifierOrValueExpression{ + dbName = &ast.IdentifierOrValueExpression{ Value: id.Value, Identifier: id, } @@ -3830,6 +4313,7 @@ func (p *Parser) parseBackupStatement() (ast.Statement, error) { p.nextToken() // Parse devices + var devices []*ast.DeviceInfo for { device := &ast.DeviceInfo{ DeviceType: "None", @@ -3837,7 +4321,9 @@ func (p *Parser) parseBackupStatement() (ast.Statement, error) { // Check for device type (DISK, TAPE, URL, etc.) deviceType := strings.ToUpper(p.curTok.Literal) + hasPhysicalType := false if deviceType == "DISK" || deviceType == "TAPE" || deviceType == "URL" || deviceType == "VIRTUAL_DEVICE" { + hasPhysicalType = true switch deviceType { case "DISK": device.DeviceType = "Disk" @@ -3855,33 +4341,52 @@ func (p *Parser) parseBackupStatement() (ast.Statement, error) { p.nextToken() } - // Parse logical device name (identifier or variable) - if p.curTok.Type == TokenIdent && len(p.curTok.Literal) > 0 && p.curTok.Literal[0] == '@' { - device.LogicalDevice = &ast.IdentifierOrValueExpression{ - Value: p.curTok.Literal, - ValueExpression: &ast.VariableReference{ + // Parse device name + if hasPhysicalType { + // Physical device: use PhysicalDevice field with ScalarExpression + if p.curTok.Type == TokenIdent && len(p.curTok.Literal) > 0 && p.curTok.Literal[0] == '@' { + device.PhysicalDevice = &ast.VariableReference{ Name: p.curTok.Literal, - }, - } - p.nextToken() - } else if p.curTok.Type == TokenString { - str, err := p.parseStringLiteral() - if err != nil { - return nil, err - } - device.LogicalDevice = &ast.IdentifierOrValueExpression{ - 
Value: str.Value, - ValueExpression: str, + } + p.nextToken() + } else if p.curTok.Type == TokenString { + str, err := p.parseStringLiteral() + if err != nil { + return nil, err + } + device.PhysicalDevice = str + } else { + return nil, fmt.Errorf("expected string or variable for physical device, got %s", p.curTok.Literal) } } else { - id := p.parseIdentifier() - device.LogicalDevice = &ast.IdentifierOrValueExpression{ - Value: id.Value, - Identifier: id, + // Logical device: use LogicalDevice field with IdentifierOrValueExpression + if p.curTok.Type == TokenIdent && len(p.curTok.Literal) > 0 && p.curTok.Literal[0] == '@' { + device.LogicalDevice = &ast.IdentifierOrValueExpression{ + Value: p.curTok.Literal, + ValueExpression: &ast.VariableReference{ + Name: p.curTok.Literal, + }, + } + p.nextToken() + } else if p.curTok.Type == TokenString { + str, err := p.parseStringLiteral() + if err != nil { + return nil, err + } + device.LogicalDevice = &ast.IdentifierOrValueExpression{ + Value: str.Value, + ValueExpression: str, + } + } else { + id := p.parseIdentifier() + device.LogicalDevice = &ast.IdentifierOrValueExpression{ + Value: id.Value, + Identifier: id, + } } } - stmt.Devices = append(stmt.Devices, device) + devices = append(devices, device) // Check for comma (more devices) if p.curTok.Type == TokenComma { @@ -3892,6 +4397,7 @@ func (p *Parser) parseBackupStatement() (ast.Statement, error) { } // Parse optional WITH clause + var options []*ast.BackupOption if p.curTok.Type == TokenWith { p.nextToken() @@ -3935,7 +4441,7 @@ func (p *Parser) parseBackupStatement() (ast.Statement, error) { option.Value = val } - stmt.Options = append(stmt.Options, option) + options = append(options, option) if p.curTok.Type == TokenComma { p.nextToken() @@ -3950,7 +4456,18 @@ func (p *Parser) parseBackupStatement() (ast.Statement, error) { p.nextToken() } - return stmt, nil + if isLog { + return &ast.BackupTransactionLogStatement{ + DatabaseName: dbName, + Devices: devices, + 
Options: options, + }, nil + } + return &ast.BackupDatabaseStatement{ + DatabaseName: dbName, + Devices: devices, + Options: options, + }, nil } func (p *Parser) parseBackupCertificateStatement() (*ast.BackupCertificateStatement, error) { @@ -4089,6 +4606,76 @@ func (p *Parser) parseBackupCertificateStatement() (*ast.BackupCertificateStatem return stmt, nil } +func (p *Parser) parseBackupServiceMasterKeyStatement() (*ast.BackupServiceMasterKeyStatement, error) { + // Consume SERVICE + p.nextToken() + + // Expect MASTER + if strings.ToUpper(p.curTok.Literal) != "MASTER" { + return nil, fmt.Errorf("expected MASTER after SERVICE, got %s", p.curTok.Literal) + } + p.nextToken() + + // Expect KEY + if p.curTok.Type != TokenKey { + return nil, fmt.Errorf("expected KEY after MASTER, got %s", p.curTok.Literal) + } + p.nextToken() + + stmt := &ast.BackupServiceMasterKeyStatement{} + + // Expect TO + if p.curTok.Type != TokenTo { + return nil, fmt.Errorf("expected TO after SERVICE MASTER KEY, got %s", p.curTok.Literal) + } + p.nextToken() + + // Expect FILE + if strings.ToUpper(p.curTok.Literal) != "FILE" { + return nil, fmt.Errorf("expected FILE after TO, got %s", p.curTok.Literal) + } + p.nextToken() + + // Expect = + if p.curTok.Type != TokenEquals { + return nil, fmt.Errorf("expected = after FILE, got %s", p.curTok.Literal) + } + p.nextToken() + + // Parse file path + file, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.File = file + + // Parse ENCRYPTION BY PASSWORD clause + if strings.ToUpper(p.curTok.Literal) == "ENCRYPTION" { + p.nextToken() // consume ENCRYPTION + if strings.ToUpper(p.curTok.Literal) == "BY" { + p.nextToken() // consume BY + } + if strings.ToUpper(p.curTok.Literal) == "PASSWORD" { + p.nextToken() // consume PASSWORD + if p.curTok.Type == TokenEquals { + p.nextToken() + } + pwd, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.Password = pwd + } + } + + // Skip optional semicolon + if 
p.curTok.Type == TokenSemicolon { + p.nextToken() + } + + return stmt, nil +} + func (p *Parser) parseCloseStatement() (ast.Statement, error) { p.nextToken() // consume CLOSE @@ -4488,10 +5075,83 @@ func (p *Parser) parseCreateExternalLibraryStatement() (*ast.CreateExternalLibra stmt := &ast.CreateExternalLibraryStatement{ Name: p.parseIdentifier(), } - // Skip rest of statement for now - for p.curTok.Type != TokenSemicolon && p.curTok.Type != TokenEOF && !p.isStatementTerminator() { - p.nextToken() + + // Parse optional AUTHORIZATION + if strings.ToUpper(p.curTok.Literal) == "AUTHORIZATION" { + p.nextToken() // consume AUTHORIZATION + stmt.Owner = p.parseIdentifier() + } + + // Parse FROM clause + if p.curTok.Type == TokenFrom { + p.nextToken() // consume FROM + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + fileOption := &ast.ExternalLibraryFileOption{} + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + switch strings.ToUpper(p.curTok.Literal) { + case "CONTENT": + p.nextToken() // consume CONTENT + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + content, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + fileOption.Content = content + case "PLATFORM": + p.nextToken() // consume PLATFORM + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + fileOption.Platform = p.parseIdentifier() + default: + p.nextToken() + } + if p.curTok.Type == TokenComma { + p.nextToken() + } + } + if fileOption.Content != nil { + stmt.ExternalLibraryFiles = append(stmt.ExternalLibraryFiles, fileOption) + } + if p.curTok.Type == TokenRParen { + p.nextToken() // consume ) + } + } + } + + // Parse WITH clause + if p.curTok.Type == TokenWith { + p.nextToken() // consume WITH + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + if p.curTok.Type == TokenLanguage || strings.ToUpper(p.curTok.Literal) == "LANGUAGE" { + 
p.nextToken() // consume LANGUAGE + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + lang, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.Language = lang + } else { + p.nextToken() + } + if p.curTok.Type == TokenComma { + p.nextToken() + } + } + if p.curTok.Type == TokenRParen { + p.nextToken() // consume ) + } + } } + + // Skip optional semicolon if p.curTok.Type == TokenSemicolon { p.nextToken() } @@ -4897,6 +5557,43 @@ func (p *Parser) parseCreateDatabaseStatement() (ast.Statement, error) { AttachMode: "None", } + // Check for Azure-style parenthesized options (maxsize=1gb, edition='web') + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + opts, err := p.parseAzureDatabaseOptions() + if err != nil { + return nil, err + } + stmt.Options = opts + if p.curTok.Type == TokenRParen { + p.nextToken() // consume ) + } + } + + // Check for AS COPY OF syntax + if p.curTok.Type == TokenAs { + p.nextToken() // consume AS + if strings.ToUpper(p.curTok.Literal) == "COPY" { + p.nextToken() // consume COPY + if p.curTok.Type == TokenOf { + p.nextToken() // consume OF + // Parse multi-part identifier (server.database or just database) + multiPart := &ast.MultiPartIdentifier{} + for { + id := p.parseIdentifier() + multiPart.Identifiers = append(multiPart.Identifiers, id) + if p.curTok.Type == TokenDot { + p.nextToken() // consume dot + } else { + break + } + } + multiPart.Count = len(multiPart.Identifiers) + stmt.CopyOf = multiPart + } + } + } + // Check for WITH clause if p.curTok.Type == TokenWith { p.nextToken() // consume WITH @@ -4904,7 +5601,7 @@ func (p *Parser) parseCreateDatabaseStatement() (ast.Statement, error) { if err != nil { return nil, err } - stmt.Options = opts + stmt.Options = append(stmt.Options, opts...) 
} // Skip rest of statement @@ -4960,6 +5657,79 @@ func (p *Parser) parseCreateDatabaseOptions() ([]ast.CreateDatabaseOption, error return options, nil } +func (p *Parser) parseAzureDatabaseOptions() ([]ast.CreateDatabaseOption, error) { + var options []ast.CreateDatabaseOption + + for p.curTok.Type != TokenRParen && p.curTok.Type != TokenEOF { + if p.curTok.Type == TokenComma { + p.nextToken() + continue + } + + optName := strings.ToUpper(p.curTok.Literal) + p.nextToken() // consume option name + + // Expect = + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + + switch optName { + case "MAXSIZE": + // Parse maxsize value and unit (e.g., "1gb", "5 gb") + maxSizeValue := p.curTok.Literal + p.nextToken() // consume value + + // Check for unit (GB, TB, etc.) - might be attached or separate + var units string + upperVal := strings.ToUpper(maxSizeValue) + if strings.HasSuffix(upperVal, "GB") { + units = "GB" + maxSizeValue = strings.TrimSuffix(upperVal, "GB") + } else if strings.HasSuffix(upperVal, "TB") { + units = "TB" + maxSizeValue = strings.TrimSuffix(upperVal, "TB") + } else if strings.HasSuffix(upperVal, "MB") { + units = "MB" + maxSizeValue = strings.TrimSuffix(upperVal, "MB") + } else { + // Unit might be separate token + if p.curTok.Type == TokenIdent { + units = strings.ToUpper(p.curTok.Literal) + p.nextToken() + } + } + + opt := &ast.MaxSizeDatabaseOption{ + OptionKind: "MaxSize", + MaxSize: &ast.IntegerLiteral{ + LiteralType: "Integer", + Value: maxSizeValue, + }, + Units: units, + } + options = append(options, opt) + + case "EDITION": + // Parse edition value (string literal) + value, _ := p.parseStringLiteral() + opt := &ast.LiteralDatabaseOption{ + OptionKind: "Edition", + Value: value, + } + options = append(options, opt) + + default: + // Skip unknown option value + if p.curTok.Type != TokenComma && p.curTok.Type != TokenRParen { + p.nextToken() + } + } + } + + return options, nil +} + func (p *Parser) parseCreateLoginStatement() 
(*ast.CreateLoginStatement, error) { p.nextToken() // consume LOGIN @@ -5282,8 +6052,114 @@ func (p *Parser) parseCreateAsymmetricKeyStatement() (*ast.CreateAsymmetricKeySt Name: p.parseIdentifier(), } - // Skip rest of statement - p.skipToEndOfStatement() + // Check for FROM PROVIDER + if p.curTok.Type == TokenFrom { + p.nextToken() // consume FROM + if strings.ToUpper(p.curTok.Literal) == "PROVIDER" { + p.nextToken() // consume PROVIDER + source := &ast.ProviderEncryptionSource{ + Name: p.parseIdentifier(), + } + stmt.EncryptionAlgorithm = "None" + + // Check for WITH options + if p.curTok.Type == TokenWith { + p.nextToken() // consume WITH + for { + optName := strings.ToUpper(p.curTok.Literal) + switch optName { + case "ALGORITHM": + p.nextToken() // consume ALGORITHM + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + alg := strings.ToUpper(p.curTok.Literal) + // Map algorithm names to proper case + algMap := map[string]string{ + "DES": "Des", + "RC2": "RC2", + "RC4": "RC4", + "RC4_128": "RC4_128", + "TRIPLE_DES": "TripleDes", + "AES_128": "Aes128", + "AES_192": "Aes192", + "AES_256": "Aes256", + "RSA_512": "Rsa512", + "RSA_1024": "Rsa1024", + "RSA_2048": "Rsa2048", + "RSA_3072": "Rsa3072", + "RSA_4096": "Rsa4096", + "DESX": "DesX", + "TRIPLE_DES_3KEY": "TripleDes3Key", + } + mappedAlg := alg + if mapped, ok := algMap[alg]; ok { + mappedAlg = mapped + } + source.KeyOptions = append(source.KeyOptions, &ast.AlgorithmKeyOption{ + Algorithm: mappedAlg, + OptionKind: "Algorithm", + }) + p.nextToken() + case "PROVIDER_KEY_NAME": + p.nextToken() // consume PROVIDER_KEY_NAME + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + keyName, _ := p.parseStringLiteral() + source.KeyOptions = append(source.KeyOptions, &ast.ProviderKeyNameKeyOption{ + KeyName: keyName, + OptionKind: "ProviderKeyName", + }) + case "CREATION_DISPOSITION": + p.nextToken() // consume CREATION_DISPOSITION + if p.curTok.Type == TokenEquals { + p.nextToken() // 
consume = + } + isCreateNew := strings.ToUpper(p.curTok.Literal) == "CREATE_NEW" + source.KeyOptions = append(source.KeyOptions, &ast.CreationDispositionKeyOption{ + IsCreateNew: isCreateNew, + OptionKind: "CreationDisposition", + }) + p.nextToken() + default: + goto doneWithOptions + } + + if p.curTok.Type == TokenComma { + p.nextToken() // consume comma + } else if strings.ToUpper(p.curTok.Literal) != "ALGORITHM" && + strings.ToUpper(p.curTok.Literal) != "PROVIDER_KEY_NAME" && + strings.ToUpper(p.curTok.Literal) != "CREATION_DISPOSITION" { + break + } + } + doneWithOptions: + } + stmt.KeySource = source + } + } + + // Check for ENCRYPTION BY PASSWORD + if strings.ToUpper(p.curTok.Literal) == "ENCRYPTION" { + p.nextToken() // consume ENCRYPTION + if strings.ToUpper(p.curTok.Literal) == "BY" { + p.nextToken() // consume BY + } + if strings.ToUpper(p.curTok.Literal) == "PASSWORD" { + p.nextToken() // consume PASSWORD + } + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + password, _ := p.parseStringLiteral() + stmt.Password = password + } + + // Skip optional semicolon and rest of statement + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } return stmt, nil } @@ -5656,12 +6532,7 @@ func (p *Parser) parseCreateFulltextStatement() (ast.Statement, error) { switch strings.ToUpper(p.curTok.Literal) { case "CATALOG": - p.nextToken() // consume CATALOG - stmt := &ast.CreateFulltextCatalogStatement{ - Name: p.parseIdentifier(), - } - p.skipToEndOfStatement() - return stmt, nil + return p.parseCreateFulltextCatalogStatement() case "INDEX": p.nextToken() // consume INDEX // FULLTEXT INDEX ON table_name @@ -5676,7 +6547,7 @@ func (p *Parser) parseCreateFulltextStatement() (ast.Statement, error) { return stmt, nil default: // Just create a catalog statement as default - stmt := &ast.CreateFulltextCatalogStatement{ + stmt := &ast.CreateFullTextCatalogStatement{ Name: p.parseIdentifier(), } p.skipToEndOfStatement() @@ -5684,6 +6555,86 @@ func (p 
*Parser) parseCreateFulltextStatement() (ast.Statement, error) { } } +func (p *Parser) parseCreateFulltextCatalogStatement() (*ast.CreateFullTextCatalogStatement, error) { + p.nextToken() // consume CATALOG + + stmt := &ast.CreateFullTextCatalogStatement{ + Name: p.parseIdentifier(), + } + + // Parse optional clauses + for p.curTok.Type != TokenEOF && p.curTok.Type != TokenSemicolon && !p.isBatchSeparator() { + switch strings.ToUpper(p.curTok.Literal) { + case "ON": + p.nextToken() // consume ON + if strings.ToUpper(p.curTok.Literal) == "FILEGROUP" { + p.nextToken() // consume FILEGROUP + stmt.FileGroup = p.parseIdentifier() + } + case "IN": + p.nextToken() // consume IN + if strings.ToUpper(p.curTok.Literal) == "PATH" { + p.nextToken() // consume PATH + path, err := p.parseScalarExpression() + if err != nil { + return nil, err + } + stmt.Path = path + } + case "WITH": + p.nextToken() // consume WITH + // Parse options like ACCENT_SENSITIVITY = ON/OFF + for { + if strings.ToUpper(p.curTok.Literal) == "ACCENT_SENSITIVITY" { + p.nextToken() // consume ACCENT_SENSITIVITY + if p.curTok.Type == TokenEquals { + p.nextToken() // consume = + } + opt := &ast.OnOffFullTextCatalogOption{ + OptionKind: "AccentSensitivity", + } + if strings.ToUpper(p.curTok.Literal) == "ON" { + opt.OptionState = "On" + } else { + opt.OptionState = "Off" + } + p.nextToken() // consume ON/OFF + stmt.Options = append(stmt.Options, opt) + } else { + break + } + if p.curTok.Type == TokenComma { + p.nextToken() + } else { + break + } + } + case "AS": + p.nextToken() // consume AS + if strings.ToUpper(p.curTok.Literal) == "DEFAULT" { + p.nextToken() // consume DEFAULT + stmt.IsDefault = true + } + case "AUTHORIZATION": + p.nextToken() // consume AUTHORIZATION + stmt.Owner = p.parseIdentifier() + default: + // Unknown clause, skip this token + if p.curTok.Type == TokenSemicolon || p.isBatchSeparator() { + break + } + p.nextToken() + } + } + + // Skip optional semicolon + if p.curTok.Type == 
TokenSemicolon { + p.nextToken() + } + + return stmt, nil +} + func (p *Parser) parseCreateRemoteServiceBindingStatement() (*ast.CreateRemoteServiceBindingStatement, error) { p.nextToken() // consume REMOTE if strings.ToUpper(p.curTok.Literal) == "SERVICE" { @@ -5805,17 +6756,135 @@ func (p *Parser) parseCreateStatisticsStatement() (*ast.CreateStatisticsStatemen return stmt, nil } -func (p *Parser) parseCreateTypeStatement() (*ast.CreateTypeStatement, error) { +func (p *Parser) parseCreateTypeStatement() (ast.Statement, error) { p.nextToken() // consume TYPE name, _ := p.parseSchemaObjectName() - stmt := &ast.CreateTypeStatement{ - Name: name, - } - // Skip rest of statement - p.skipToEndOfStatement() - return stmt, nil + // Check what follows the type name + switch strings.ToUpper(p.curTok.Literal) { + case "FROM": + // CREATE TYPE ... FROM (User-Defined Data Type) + p.nextToken() // consume FROM + // Check if there's a valid data type to parse + if p.curTok.Type == TokenEOF || p.curTok.Type == TokenSemicolon { + // Incomplete statement - fall through to generic type + stmt := &ast.CreateTypeStatement{ + Name: name, + } + p.skipToEndOfStatement() + return stmt, nil + } + dataType, err := p.parseDataTypeReference() + if err != nil { + // Fall back to generic type on error + stmt := &ast.CreateTypeStatement{ + Name: name, + } + p.skipToEndOfStatement() + return stmt, nil + } + stmt := &ast.CreateTypeUddtStatement{ + Name: name, + DataType: dataType, + } + // Check for NULL / NOT NULL + if p.curTok.Type == TokenNull { + stmt.NullableConstraint = &ast.NullableConstraintDefinition{Nullable: true} + p.nextToken() + } else if p.curTok.Type == TokenNot { + p.nextToken() // consume NOT + if p.curTok.Type == TokenNull { + p.nextToken() // consume NULL + } + stmt.NullableConstraint = &ast.NullableConstraintDefinition{Nullable: false} + } + // Skip semicolon if present + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + return stmt, nil + case "EXTERNAL": + // CREATE 
TYPE ... EXTERNAL NAME (CLR User-Defined Type) + p.nextToken() // consume EXTERNAL + if strings.ToUpper(p.curTok.Literal) != "NAME" { + // Incomplete statement - fall back to generic type + stmt := &ast.CreateTypeStatement{ + Name: name, + } + p.skipToEndOfStatement() + return stmt, nil + } + p.nextToken() // consume NAME + // Check if there's something to parse + if p.curTok.Type == TokenEOF || p.curTok.Type == TokenSemicolon { + // Incomplete statement - fall back to generic type + stmt := &ast.CreateTypeStatement{ + Name: name, + } + p.skipToEndOfStatement() + return stmt, nil + } + // Parse assembly name (could be [AssemblyName] or AssemblyName.[ClassName]) + assemblyName := &ast.AssemblyName{} + firstIdent := p.parseIdentifier() + assemblyName.Name = firstIdent + // Check for dot and class name + if p.curTok.Type == TokenDot { + p.nextToken() // consume dot + className := p.parseIdentifier() + assemblyName.ClassName = className + } + stmt := &ast.CreateTypeUdtStatement{ + Name: name, + AssemblyName: assemblyName, + } + // Skip semicolon if present + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + return stmt, nil + case "AS": + // Check if this is AS TABLE + p.nextToken() // consume AS + if strings.ToUpper(p.curTok.Literal) == "TABLE" { + p.nextToken() // consume TABLE + // Parse the table definition + if p.curTok.Type == TokenLParen { + p.nextToken() // consume ( + tableDef, err := p.parseTableDefinitionBody() + if err != nil { + stmt := &ast.CreateTypeStatement{ + Name: name, + } + p.skipToEndOfStatement() + return stmt, nil + } + stmt := &ast.CreateTypeTableStatement{ + Name: name, + Definition: tableDef, + } + // Skip closing paren if present + if p.curTok.Type == TokenRParen { + p.nextToken() + } + // Skip semicolon if present + if p.curTok.Type == TokenSemicolon { + p.nextToken() + } + return stmt, nil + } + } + // Fall through to generic type + fallthrough + default: + // Generic CREATE TYPE statement + stmt := &ast.CreateTypeStatement{ + 
Name: name, + } + p.skipToEndOfStatement() + return stmt, nil + } } func (p *Parser) parseCreateXmlIndexStatement() (*ast.CreateXmlIndexStatement, error) { diff --git a/parser/testdata/AlterExternalLibrary140/metadata.json b/parser/testdata/AlterExternalLibrary140/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/AlterExternalLibrary140/metadata.json +++ b/parser/testdata/AlterExternalLibrary140/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/AlterIndexStatementTests150/metadata.json b/parser/testdata/AlterIndexStatementTests150/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/AlterIndexStatementTests150/metadata.json +++ b/parser/testdata/AlterIndexStatementTests150/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/AsymmetricKeyStatementTests100/metadata.json b/parser/testdata/AsymmetricKeyStatementTests100/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/AsymmetricKeyStatementTests100/metadata.json +++ b/parser/testdata/AsymmetricKeyStatementTests100/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines100_AsymmetricKeyStatementTests100/metadata.json b/parser/testdata/Baselines100_AsymmetricKeyStatementTests100/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines100_AsymmetricKeyStatementTests100/metadata.json +++ b/parser/testdata/Baselines100_AsymmetricKeyStatementTests100/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines100_TableParametersTests/metadata.json b/parser/testdata/Baselines100_TableParametersTests/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/Baselines100_TableParametersTests/metadata.json +++ 
b/parser/testdata/Baselines100_TableParametersTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/Baselines110_CreateAlterDatabaseStatementTestsAzure110/metadata.json b/parser/testdata/Baselines110_CreateAlterDatabaseStatementTestsAzure110/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines110_CreateAlterDatabaseStatementTestsAzure110/metadata.json +++ b/parser/testdata/Baselines110_CreateAlterDatabaseStatementTestsAzure110/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines110_CreateSequenceStatementTests/metadata.json b/parser/testdata/Baselines110_CreateSequenceStatementTests/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines110_CreateSequenceStatementTests/metadata.json +++ b/parser/testdata/Baselines110_CreateSequenceStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines110_ServerAuditStatementTests110/metadata.json b/parser/testdata/Baselines110_ServerAuditStatementTests110/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines110_ServerAuditStatementTests110/metadata.json +++ b/parser/testdata/Baselines110_ServerAuditStatementTests110/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines120_FromClauseTests120/metadata.json b/parser/testdata/Baselines120_FromClauseTests120/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines120_FromClauseTests120/metadata.json +++ b/parser/testdata/Baselines120_FromClauseTests120/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines130_CreateColumnStoreIndexTests130/metadata.json 
b/parser/testdata/Baselines130_CreateColumnStoreIndexTests130/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/Baselines130_CreateColumnStoreIndexTests130/metadata.json +++ b/parser/testdata/Baselines130_CreateColumnStoreIndexTests130/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/Baselines130_CreateHekatonTriggerStatementTest/metadata.json b/parser/testdata/Baselines130_CreateHekatonTriggerStatementTest/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines130_CreateHekatonTriggerStatementTest/metadata.json +++ b/parser/testdata/Baselines130_CreateHekatonTriggerStatementTest/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines130_CreateIndexStatementTests130/metadata.json b/parser/testdata/Baselines130_CreateIndexStatementTests130/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines130_CreateIndexStatementTests130/metadata.json +++ b/parser/testdata/Baselines130_CreateIndexStatementTests130/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines140_AlterExternalLibrary140/metadata.json b/parser/testdata/Baselines140_AlterExternalLibrary140/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines140_AlterExternalLibrary140/metadata.json +++ b/parser/testdata/Baselines140_AlterExternalLibrary140/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines140_CreateExternalLibrary140/metadata.json b/parser/testdata/Baselines140_CreateExternalLibrary140/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines140_CreateExternalLibrary140/metadata.json +++ b/parser/testdata/Baselines140_CreateExternalLibrary140/metadata.json @@ -1 +1 @@ -{"todo": true} \ No 
newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines140_SelectStatementTests140/metadata.json b/parser/testdata/Baselines140_SelectStatementTests140/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/Baselines140_SelectStatementTests140/metadata.json +++ b/parser/testdata/Baselines140_SelectStatementTests140/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/Baselines150_AlterIndexStatementTests150/metadata.json b/parser/testdata/Baselines150_AlterIndexStatementTests150/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/Baselines150_AlterIndexStatementTests150/metadata.json +++ b/parser/testdata/Baselines150_AlterIndexStatementTests150/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/Baselines150_CreateExternalLibrary150/metadata.json b/parser/testdata/Baselines150_CreateExternalLibrary150/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines150_CreateExternalLibrary150/metadata.json +++ b/parser/testdata/Baselines150_CreateExternalLibrary150/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines160_CreateFunctionStatementTests160/metadata.json b/parser/testdata/Baselines160_CreateFunctionStatementTests160/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/Baselines160_CreateFunctionStatementTests160/metadata.json +++ b/parser/testdata/Baselines160_CreateFunctionStatementTests160/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/Baselines160_ExpressionTests160/metadata.json b/parser/testdata/Baselines160_ExpressionTests160/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/Baselines160_ExpressionTests160/metadata.json +++ b/parser/testdata/Baselines160_ExpressionTests160/metadata.json @@ -1 +1 
@@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/Baselines160_VectorFunctionTests160/metadata.json b/parser/testdata/Baselines160_VectorFunctionTests160/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/Baselines160_VectorFunctionTests160/metadata.json +++ b/parser/testdata/Baselines160_VectorFunctionTests160/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/Baselines90_AlterAsymmetricKeyStatementTests/metadata.json b/parser/testdata/Baselines90_AlterAsymmetricKeyStatementTests/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines90_AlterAsymmetricKeyStatementTests/metadata.json +++ b/parser/testdata/Baselines90_AlterAsymmetricKeyStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines90_CreateFulltextCatalogStatementTests/metadata.json b/parser/testdata/Baselines90_CreateFulltextCatalogStatementTests/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines90_CreateFulltextCatalogStatementTests/metadata.json +++ b/parser/testdata/Baselines90_CreateFulltextCatalogStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines90_CreateTypeStatementTests/metadata.json b/parser/testdata/Baselines90_CreateTypeStatementTests/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines90_CreateTypeStatementTests/metadata.json +++ b/parser/testdata/Baselines90_CreateTypeStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines90_DumpLoadStatement90Tests/metadata.json b/parser/testdata/Baselines90_DumpLoadStatement90Tests/metadata.json index ccffb5b9..9e26dfee 100644 --- 
a/parser/testdata/Baselines90_DumpLoadStatement90Tests/metadata.json +++ b/parser/testdata/Baselines90_DumpLoadStatement90Tests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines90_DumpLoadStatementTests/metadata.json b/parser/testdata/Baselines90_DumpLoadStatementTests/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/Baselines90_DumpLoadStatementTests/metadata.json +++ b/parser/testdata/Baselines90_DumpLoadStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/Baselines90_InsertStatementTests90/metadata.json b/parser/testdata/Baselines90_InsertStatementTests90/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/Baselines90_InsertStatementTests90/metadata.json +++ b/parser/testdata/Baselines90_InsertStatementTests90/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/BaselinesCommon_AlterFunctionStatementTests/metadata.json b/parser/testdata/BaselinesCommon_AlterFunctionStatementTests/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/BaselinesCommon_AlterFunctionStatementTests/metadata.json +++ b/parser/testdata/BaselinesCommon_AlterFunctionStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/BaselinesCommon_DeleteStatementTests/metadata.json b/parser/testdata/BaselinesCommon_DeleteStatementTests/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/BaselinesCommon_DeleteStatementTests/metadata.json +++ b/parser/testdata/BaselinesCommon_DeleteStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/BaselinesCommon_IdentifierTests/metadata.json b/parser/testdata/BaselinesCommon_IdentifierTests/metadata.json index 
ccffb5b9..0967ef42 100644 --- a/parser/testdata/BaselinesCommon_IdentifierTests/metadata.json +++ b/parser/testdata/BaselinesCommon_IdentifierTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/BaselinesCommon_UpdateStatementTests/metadata.json b/parser/testdata/BaselinesCommon_UpdateStatementTests/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/BaselinesCommon_UpdateStatementTests/metadata.json +++ b/parser/testdata/BaselinesCommon_UpdateStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/CreateAlterDatabaseStatementTestsAzure110/metadata.json b/parser/testdata/CreateAlterDatabaseStatementTestsAzure110/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/CreateAlterDatabaseStatementTestsAzure110/metadata.json +++ b/parser/testdata/CreateAlterDatabaseStatementTestsAzure110/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/CreateColumnStoreIndexTests130/metadata.json b/parser/testdata/CreateColumnStoreIndexTests130/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/CreateColumnStoreIndexTests130/metadata.json +++ b/parser/testdata/CreateColumnStoreIndexTests130/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/CreateExternalLibrary140/metadata.json b/parser/testdata/CreateExternalLibrary140/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/CreateExternalLibrary140/metadata.json +++ b/parser/testdata/CreateExternalLibrary140/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/CreateExternalLibrary150/metadata.json b/parser/testdata/CreateExternalLibrary150/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/CreateExternalLibrary150/metadata.json +++ 
b/parser/testdata/CreateExternalLibrary150/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/CreateFunctionStatementTests160/metadata.json b/parser/testdata/CreateFunctionStatementTests160/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/CreateFunctionStatementTests160/metadata.json +++ b/parser/testdata/CreateFunctionStatementTests160/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/CreateHekatonTriggerStatementTest/metadata.json b/parser/testdata/CreateHekatonTriggerStatementTest/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/CreateHekatonTriggerStatementTest/metadata.json +++ b/parser/testdata/CreateHekatonTriggerStatementTest/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/CreateIndexStatementTests130/metadata.json b/parser/testdata/CreateIndexStatementTests130/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/CreateIndexStatementTests130/metadata.json +++ b/parser/testdata/CreateIndexStatementTests130/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/CreateSequenceStatementTests/metadata.json b/parser/testdata/CreateSequenceStatementTests/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/CreateSequenceStatementTests/metadata.json +++ b/parser/testdata/CreateSequenceStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/CreateTypeStatementTests/metadata.json b/parser/testdata/CreateTypeStatementTests/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/CreateTypeStatementTests/metadata.json +++ b/parser/testdata/CreateTypeStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No 
newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/DeleteStatementTests/metadata.json b/parser/testdata/DeleteStatementTests/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/DeleteStatementTests/metadata.json +++ b/parser/testdata/DeleteStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/ExpressionTests160/metadata.json b/parser/testdata/ExpressionTests160/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/ExpressionTests160/metadata.json +++ b/parser/testdata/ExpressionTests160/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/InsertStatementTests90/metadata.json b/parser/testdata/InsertStatementTests90/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/InsertStatementTests90/metadata.json +++ b/parser/testdata/InsertStatementTests90/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/PhaseOne_CreateFulltextCatalog/metadata.json b/parser/testdata/PhaseOne_CreateFulltextCatalog/metadata.json index 9e26dfee..a989cc0a 100644 --- a/parser/testdata/PhaseOne_CreateFulltextCatalog/metadata.json +++ b/parser/testdata/PhaseOne_CreateFulltextCatalog/metadata.json @@ -1 +1 @@ -{} \ No newline at end of file +{"invalid_syntax": true} diff --git a/parser/testdata/SelectStatementTests140/metadata.json b/parser/testdata/SelectStatementTests140/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/SelectStatementTests140/metadata.json +++ b/parser/testdata/SelectStatementTests140/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/ServerAuditStatementTests110/metadata.json b/parser/testdata/ServerAuditStatementTests110/metadata.json index ccffb5b9..9e26dfee 100644 --- a/parser/testdata/ServerAuditStatementTests110/metadata.json +++ 
b/parser/testdata/ServerAuditStatementTests110/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} \ No newline at end of file diff --git a/parser/testdata/TableParametersTests/metadata.json b/parser/testdata/TableParametersTests/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/TableParametersTests/metadata.json +++ b/parser/testdata/TableParametersTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/UpdateStatementTests/metadata.json b/parser/testdata/UpdateStatementTests/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/UpdateStatementTests/metadata.json +++ b/parser/testdata/UpdateStatementTests/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{} diff --git a/parser/testdata/VectorFunctionTests160/metadata.json b/parser/testdata/VectorFunctionTests160/metadata.json index ccffb5b9..0967ef42 100644 --- a/parser/testdata/VectorFunctionTests160/metadata.json +++ b/parser/testdata/VectorFunctionTests160/metadata.json @@ -1 +1 @@ -{"todo": true} \ No newline at end of file +{}