diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 59aa5b5..1a133c2 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @barbaravaldez @dzsquared @zijchen @chlafreniere @llali \ No newline at end of file +* @dzsquared @zijchen @chlafreniere @llali @ssreerama @kburtram diff --git a/.github/README.md b/.github/README.md new file mode 100644 index 0000000..ce0ce57 --- /dev/null +++ b/.github/README.md @@ -0,0 +1,304 @@ +# SqlScriptDOM Documentation Guide + +Welcome to the SqlScriptDOM documentation! This folder contains comprehensive guides for understanding, developing, and debugging the SQL Server T-SQL parser. + +## πŸš€ Quick Start + +**New to the project?** Start here: +1. Read [copilot-instructions.md](copilot-instructions.md) - Main project documentation +2. Browse [debugging_workflow.guidelines.instructions.md](instructions/debugging_workflow.guidelines.instructions.md) - Visual quick reference + +**Fixing a bug?** Start here: +1. Open [debugging_workflow.guidelines.instructions.md](instructions/debugging_workflow.guidelines.instructions.md) - Identify bug type +2. Follow the flowchart to the appropriate guide +3. 
Use the step-by-step instructions + +## πŸ“š Documentation Map + +### Core Documentation + +#### [copilot-instructions.md](copilot-instructions.md) - **START HERE** +**Purpose**: Main project documentation and overview +**Contains**: +- Project structure and key files +- Build and test commands +- Developer workflow +- Bug fixing triage +- Debugging tips +- Grammar gotchas and pitfalls + +**When to read**: First time working on the project, or for general context + +--- + +### Quick Reference + +#### [debugging_workflow.guidelines.instructions.md](instructions/debugging_workflow.guidelines.instructions.md) - **QUICK REFERENCE** +**Purpose**: Visual guide for quick bug diagnosis +**Contains**: +- Diagnostic flowchart +- Error pattern recognition +- Investigation steps +- Testing commands reference +- Key files reference +- Common pitfalls + +**When to use**: When you have a bug and need to quickly identify what type of fix is needed + +--- + +### Specialized Fix Guides + +#### [Validation_fix.guidelines.instructions.md](instructions/Validation_fix.guidelines.instructions.md) - Most Common Fix Type ⭐ +**Purpose**: Fixing validation-based bugs +**When to use**: +- βœ… Error: "Option 'X' is not valid..." or "Feature not supported..." +- βœ… Same syntax works in different context (e.g., ALTER INDEX vs ALTER TABLE) +- βœ… SQL Server version-specific features + +**Contains**: +- Real-world example (ALTER TABLE ADD CONSTRAINT RESUMABLE) +- Version flag patterns +- Validation logic modification +- Testing strategy + +**Complexity**: ⭐ Easy +**Typical time**: 1-2 hours + +--- + +#### [bug_fixing.guidelines.instructions.md](instructions/bug_fixing.guidelines.instructions.md) - Grammar Changes +**Purpose**: Adding new syntax or modifying parser grammar +**When to use**: +- βœ… Error: "Incorrect syntax near..." or "Unexpected token..." 
+- βœ… Parser doesn't recognize new T-SQL features +- βœ… Need to add new keywords, operators, or statements + +**Contains**: +- Complete bug-fixing workflow +- Grammar modification process +- AST updates +- Script generator changes +- Baseline generation +- Decision tree for bug types + +**Complexity**: ⭐⭐⭐ Medium to Hard +**Typical time**: 4-8 hours + +--- + +#### [parser.guidelines.instructions.md](instructions/parser.guidelines.instructions.md) +**Purpose**: Fixing parentheses recognition issues +**When to use**: +- βœ… `WHERE PREDICATE(...)` works +- ❌ `WHERE (PREDICATE(...))` fails with syntax error +- βœ… Identifier-based boolean predicates + +**Contains**: +- `IsNextRuleBooleanParenthesis()` modification +- Predicate detection patterns +- Real example (REGEXP_LIKE) + +**Complexity**: ⭐⭐ Easy-Medium +**Typical time**: 1-3 hours + +--- + +#### [grammer.guidelines.instructions.md](instructions/grammer.guidelines.instructions.md) +**Purpose**: Common patterns for extending existing grammar +**When to use**: +- βœ… Need to extend literal types to accept expressions +- βœ… Adding new enum members +- βœ… Creating new function/statement types + +**Contains**: +- Literal to expression pattern +- Real example (VECTOR_SEARCH TOP_N) +- Context-specific grammar rules +- Shared rule warnings + +**Complexity**: ⭐⭐⭐ Medium +**Typical time**: 3-6 hours + +--- + +### Meta Documentation + +#### [documentation.guidelines.instructions.md](instructions/documentation.guidelines.instructions.md) +**Purpose**: Summary of documentation improvements +**Contains**: +- What was improved and why +- Before/after comparison +- Real-world validation (ALTER TABLE RESUMABLE) +- Lessons learned + +**When to read**: If you want to understand the documentation structure and evolution + +--- + +## 🎯 Bug Type Decision Tree + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ You have a parsing bug β”‚ 
+β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό + β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” + β”‚ What's the β”‚ + β”‚ error message?β”‚ + β””β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β” + β”‚ β”‚ β”‚ + β–Ό β–Ό β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β” +β”‚Optionβ”‚ β”‚Syntaxβ”‚ β”‚Parensβ”‚ +β”‚error β”‚ β”‚error β”‚ β”‚break β”‚ +β””β”€β”€β”¬β”€β”€β”€β”˜ β””β”€β”€β”¬β”€β”€β”€β”˜ β””β”€β”€β”¬β”€β”€β”€β”˜ + β”‚ β”‚ β”‚ + β–Ό β–Ό β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β” +β”‚VALID-β”‚ β”‚BUG β”‚ β”‚PARSERβ”‚ +β”‚ATION β”‚ β”‚FIXINGβ”‚ β”‚PRED β”‚ +β”‚FIX β”‚ β”‚GUIDE β”‚ β”‚RECOG β”‚ +β””β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”˜ +``` + +## πŸ“‹ Quick Reference Table + +| Error Message | Bug Type | Guide | Complexity | +|--------------|----------|-------|------------| +| "Option 'X' is not valid in statement Y" | Validation | [Validation_fix.guidelines.instructions.md](instructions/Validation_fix.guidelines.instructions.md) | ⭐ Easy | +| "Feature 'X' not supported in version Y" | Validation | [Validation_fix.guidelines.instructions.md](instructions/Validation_fix.guidelines.instructions.md) | ⭐ Easy | +| "Incorrect syntax near keyword" | Grammar | [bug_fixing.guidelines.instructions.md](instructions/bug_fixing.guidelines.instructions.md) | ⭐⭐⭐ Medium | +| "Unexpected token" | Grammar | [bug_fixing.guidelines.instructions.md](instructions/bug_fixing.guidelines.instructions.md) | ⭐⭐⭐ Medium | +| Syntax error with parentheses only | Predicate Recognition | [parser.guidelines.instructions.md](instructions/parser.guidelines.instructions.md) | ⭐⭐ Easy-Medium | +| Need to extend literal to expression | Grammar Extension | [GRAMMAR_EXTENSION_PATTERNS](GRAMMAR_EXTENSION_PATTERNS.md) | ⭐⭐⭐ Medium | + +## πŸ” Common 
Scenarios
+
+### Scenario 1: New SQL Server Feature Not Recognized
+**Example**: `ALTER TABLE ... WITH (RESUMABLE = ON)` fails
+**Likely Issue**: Validation blocking the option
+**Start With**: [Validation_fix.guidelines.instructions.md](instructions/Validation_fix.guidelines.instructions.md)
+
+### Scenario 2: New T-SQL Keyword Not Parsed
+**Example**: `CREATE EXTERNAL TABLE` not recognized
+**Likely Issue**: Grammar doesn't have rules for this syntax
+**Start With**: [bug_fixing.guidelines.instructions.md](instructions/bug_fixing.guidelines.instructions.md)
+
+### Scenario 3: Function Works Sometimes, Fails with Parentheses
+**Example**: `WHERE REGEXP_LIKE(...)` works, but `WHERE (REGEXP_LIKE(...))` fails
+**Likely Issue**: Predicate recognition
+**Start With**: [parser.guidelines.instructions.md](instructions/parser.guidelines.instructions.md)
+
+### Scenario 4: Parameter Support Needed
+**Example**: `TOP_N = @parameter` should work
+**Likely Issue**: Need to extend from literal to expression
+**Start With**: [grammer.guidelines.instructions.md](instructions/grammer.guidelines.instructions.md)
+
+## πŸ› οΈ Essential Commands
+
+```bash
+# Build parser
+dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug
+
+# Run specific test
+dotnet test --filter "FullyQualifiedName~YourTest" -c Debug
+
+# Run ALL tests (CRITICAL before committing!)
+dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug
+
+# Search for error code
+grep -r "SQL46057" SqlScriptDom/
+
+# Search for option usage
+grep -r "RESUMABLE" Test/SqlDom/TestScripts/
+```
+
+## πŸ“Š Documentation Statistics
+
+- **Total Guides**: 6 comprehensive guides
+- **Bug Types Covered**: 3 main types (validation, grammar, predicate recognition)
+- **Real-World Examples**: 4 detailed examples with code
+- **Code Samples**: 50+ practical bash/C#/SQL examples
+- **Quick References**: 3 tables and 2 flowcharts
+
+## πŸŽ“ Learning Path
+
+### Beginner Path (Understanding the Project)
+1. [copilot-instructions.md](copilot-instructions.md) - Read "Key points" section
+2. 
[debugging_workflow.guidelines.instructions.md](instructions/debugging_workflow.guidelines.instructions.md) - Understand bug types +3. [Validation_fix.guidelines.instructions.md](instructions/Validation_fix.guidelines.instructions.md) - Follow ALTER TABLE RESUMABLE example +4. Try fixing a validation bug yourself + +**Time**: 2-3 hours + +### Intermediate Path (Grammar Changes) +1. Review beginner path first +2. [bug_fixing.guidelines.instructions.md](instructions/bug_fixing.guidelines.instructions.md) - Complete workflow +3. [grammer.guidelines.instructions.md](instructions/grammer.guidelines.instructions.md) - Common patterns +4. [copilot-instructions.md](copilot-instructions.md) - "Grammar Gotchas" section +5. Try adding a simple new keyword + +**Time**: 4-6 hours + +### Advanced Path (Complex Features) +1. Master beginner and intermediate paths +2. [bug_fixing.guidelines.instructions.md](instructions/bug_fixing.guidelines.instructions.md) - AST modifications +3. [grammer.guidelines.instructions.md](instructions/grammer.guidelines.instructions.md) - All patterns +4. Study existing complex features (e.g., VECTOR_SEARCH) +5. Implement a new statement type + +**Time**: 8-16 hours + +## 🚨 Critical Reminders + +### Always Do This: +- βœ… **Run full test suite** before committing (1,100+ tests) +- βœ… **Check Microsoft docs** for exact version support +- βœ… **Search for error messages** first before coding +- βœ… **Create context-specific rules** instead of modifying shared ones +- βœ… **Test across all SQL Server versions** in test configuration + +### Never Do This: +- ❌ Modify shared grammar rules without understanding impact +- ❌ Skip running the full test suite +- ❌ Assume version support - always verify documentation +- ❌ Edit generated files in `obj/` directory +- ❌ Commit without testing baseline generation + +## 🀝 Contributing + +When improving these docs: +1. Use real examples from actual bugs +2. Include complete code samples (not pseudo-code) +3. 
Add bash commands that actually work +4. Cross-reference related guides +5. Update this README if adding new guides + +## πŸ“ž Getting Help + +If stuck: +1. Search error message in codebase: `grep -r "your error"` +2. Check similar working syntax: `grep -r "keyword" Test/SqlDom/` +3. Review relevant guide based on bug type +4. Check Git history for similar fixes: `git log --grep="RESUMABLE"` + +## πŸŽ‰ Success Metrics + +You know you've succeeded when: +- βœ… Your specific test passes +- βœ… **ALL 1,100+ tests pass** (critical!) +- βœ… Baseline matches generated output +- βœ… Version-specific behavior is correct +- βœ… No regressions in existing functionality + +--- + +**Last Updated**: Based on ALTER TABLE RESUMABLE fix (October 2025) + +**Contributors**: Documentation improved based on practical bug-fixing experience + +**Feedback**: These guides are living documents. Please update them when you discover new patterns or better approaches! diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 0000000..9920c5e --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,209 @@ +# Copilot / AI instructions for SqlScriptDOM + +ScriptDom is a library for parsing and generating T-SQL scripts. It is primarily used by DacFx to build database projects, perform schema comparisons, and generate scripts for deployment. + +## Key points (quick read) +- Grammar files live in: `SqlScriptDom/Parser/TSql/` β€” each file corresponds to a SQL Server version (e.g. `TSql170.g` for 170 / SQL Server 2025). +- Grammar format: ANTLR v2. Generated C# lexer/parser code is produced during the build (see `GenerateFiles.props`). +- Build & tests: use the .NET SDK pinned in `global.json`. 
Typical commands from repo root: + - `dotnet build -c Debug` + - `dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug` +- To regenerate parser/token/AST sources explicitly, build the main project (generation targets are hooked into its build): + - `dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug` + - (or) `dotnet msbuild SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -t:GLexerParserCompile;GSqlTokenTypesCompile;CreateAST -p:Configuration=Debug` + +## Why files are generated and where +- `SqlScriptDom/GenerateFiles.props` contains the MSBuild targets invoked during the library build: + - `GSqlTokenTypesCompile` / `GLexerParserCompile` -> run ANTLR and post-process outputs (powershell/sed scripts) + - `CreateAST` -> runs AstGen tool (from `tools/AstGen`) to generate AST visitor/fragment classes + - `GenerateEverything` -> runs ScriptGenSettingsGenerator and TokenListGenerator +- The Antlr binary is downloaded to the path defined in `Directory.Build.props` (`AntlrLocation`) when the build runs (via the `InstallAntlr` target). +- Generated C# files are written to `$(CsGenIntermediateOutputPath)` (under `obj/...` by default). Do not hand-edit generated files β€” change the .g grammar or post-processing scripts instead. + +## Important files and folders (read these first) +- `SqlScriptDom/Parser/TSql/*.g` β€” ANTLR v2 grammar files (TSql80..TSql170 etc.). Example: `TSql170.g` defines new-170 syntax. +- `SqlScriptDom/GenerateFiles.props` and `Directory.Build.props` β€” define code generation targets and antlr location. +- `SqlScriptDom/ParserPostProcessing.sed`, `LexerPostProcessing.sed`, `TSqlTokenTypes.ps1` β€” post-processing for generated C# sources and tokens. +- `tools/` β€” contains code generators used during build: `AstGen`, `ScriptGenSettingsGenerator`, `TokenListGenerator`. +- `Test/SqlDom/` β€” unit tests, baselines and test scripts. See `Only170SyntaxTests.cs`, `TestScripts/`, and `Baselines170/`. 
+- `.github/instructions/testing.guidelines.instructions.md` β€” comprehensive testing framework guide with patterns and best practices. +- `.github/instructions/function.guidelines.instructions.md` β€” specialized guide for adding new T-SQL system functions. + +## Developer workflow & conventions (typical change cycle) +1. Add/modify grammar rule(s) in the correct `TSql*.g` (pick the _version_ the syntax belongs to). +2. If tokens or token ordering change, update `TSqlTokenTypes.g` (and the sed/ps1 post-processors if necessary). +3. Rebuild the ScriptDom project to regenerate parser and AST (`dotnet build` will run generation). Use the targeted msbuild targets if you only want generation. +4. Add tests: + - **YOU MUST ADD UNIT TESTS** - Use the existing test framework in `Test/SqlDom/` + - **DO NOT CREATE STANDALONE PROGRAMS TO TEST** - Avoid separate console applications or debug programs + - Put the input SQL in `Test/SqlDom/TestScripts/` (filename is case sensitive and used as an embedded resource). + - Add/confirm baseline output in `Test/SqlDom/Baselines/` (the UT project embeds these baselines as resources). + - Update the appropriate `OnlySyntaxTests.cs` (e.g., `Only170SyntaxTests.cs`) by adding a `ParserTest170("MyNewTest.sql", ...)` entry. See `ParserTest.cs` and `ParserTestOutput.cs` for helper constructors and verification semantics. + - **For comprehensive testing guidance**, see [Testing Guidelines](instructions/testing.guidelines.instructions.md) with detailed patterns, best practices, and simplified constructor approaches. +5. **Run full test suite** to ensure no regressions: + ```bash + dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug + ``` + - ⚠️ **CRITICAL**: Grammar changes can break unrelated functionality when shared rules are modified + - If tests fail unexpectedly, create context-specific grammar rules instead of modifying shared ones +6. Iterate until all tests pass, including both new functionality and existing regression tests. 
+ +## Testing details and how tests assert correctness +- Tests run a full parse -> script generator -> reparse round-trip. Baseline comparison verifies pretty-printed generated scripts exactly match the stored baseline. +- Expected parse errors (where applicable) are verified by number and exact error messages; test helpers live in `ParserTest.cs`, `ParserTestOutput.cs`, and `ParserTestUtils.cs`. +- If a test fails due to mismatch in generated script, compare the generated output (the test harness logs it) against the baseline to spot formatting/structure differences. + +## Bug Fixing and Baseline Generation + +Different types of bugs require different fix approaches. **Start by diagnosing which type of issue you're dealing with:** + +### 1. Validation-Based Issues (Most Common) +If you see an error like "Option 'X' is not valid..." or "Feature 'Y' not supported..." but the syntax SHOULD work according to SQL Server docs: +- **Guide**: [Validation Fix Guide](instructions/validation_fix.guidelines.instructions.md) - Version-gated validation fixes +- **Example**: ALTER TABLE RESUMABLE option (SQL Server 2022+) +- **Key Signal**: Similar syntax works in other contexts (e.g., ALTER INDEX works but ALTER TABLE doesn't) + +### 2. Grammar-Based Issues (Adding New Syntax) +If the parser doesn't recognize the syntax at all, or you need to add new T-SQL features: +- **Guide**: [Bug Fixing Guide](instructions/bug_fixing.guidelines.instructions.md) - Grammar modifications, AST updates, script generation +- **Example**: Adding new operators, statements, or function types +- **Key Signal**: Syntax error like "Incorrect syntax near..." or "Unexpected token..." + +### 3. 
Parser Predicate Recognition Issues (Parentheses) +If identifier-based predicates (like `REGEXP_LIKE`) work without parentheses but fail with them: +- **Guide**: [Parser Predicate Recognition Fix Guide](instructions/parser.guidelines.instructions.md) +- **Example**: `WHERE REGEXP_LIKE('a', 'pattern')` works, but `WHERE (REGEXP_LIKE('a', 'pattern'))` fails +- **Key Signal**: Syntax error near closing parenthesis or semicolon + +**Quick Diagnostic**: Search for the error message in the codebase to determine which type of fix is needed. + +### 4. Adding New System Functions +For adding new T-SQL system functions to the parser, including handling RETURN statement contexts and ANTLR v2 syntactic predicate limitations: +- **Guide**: [Function Guidelines](instructions/function.guidelines.instructions.md) - Complete guide for system function implementation +- **Example**: JSON_OBJECT, JSON_ARRAY functions with RETURN statement support +- **Key Requirements**: Syntactic predicates for lookahead, proper AST design, comprehensive testing + +### 5. Adding New Data Types +For adding completely new SQL Server data types that require custom parsing logic and specialized AST nodes: +- **Guide**: [New Data Types Guidelines](instructions/new_data_types.guidelines.instructions.md) - Complete guide for implementing new data types +- **Example**: VECTOR data type with dimension and optional base type parameters +- **Key Signal**: New SQL Server data type with custom parameter syntax different from standard data types + +### 6. 
Adding New Index Types +For adding completely new SQL Server index types that require specialized syntax and custom parsing logic: +- **Guide**: [New Index Types Guidelines](instructions/new_index_types.guidelines.instructions.md) - Complete guide for implementing new index types +- **Example**: JSON INDEX with FOR clause, VECTOR INDEX with METRIC/TYPE options +- **Key Signal**: New SQL Server index type with custom syntax different from standard CREATE INDEX + +## Editing generated outputs, debugging generation +- Never edit generated files permanently (they live under `obj/...`/CsGenIntermediateOutputPath). Instead change: + - `.g` grammar files + - post-processing scripts (`*.ps1`/`*.sed`) + - AST XML in `SqlScriptDom/Parser/TSql/Ast.xml` if AST node shapes need to change (used by `tools/AstGen`). +- To see antlr output/errors, force verbose generation by setting MSBuild property `OutputErrorInLexerParserCompile=true` on the command line (e.g. `dotnet msbuild -t:GLexerParserCompile -p:OutputErrorInLexerParserCompile=true`). +- If the antlr download fails during build, manually download `antlr-2.7.5.jar` (for non-Windows) or `.exe` (for Windows) and place it at the location defined in `Directory.Build.props` or override `AntlrLocation` when invoking msbuild. 
+ +## Debugging Tips and Investigation Workflow + +### Step 1: Identify the Bug Type +Start by searching for the error message to understand what type of fix is needed: +```bash +# Search for error code or message +grep -r "SQL46057" SqlScriptDom/ +grep -r "is not a valid" SqlScriptDom/ +``` + +**Common Error Patterns**: +- `"Option 'X' is not valid..."` β†’ Validation issue (see [grammar_validation.guidelines.instructions.md](instructions/grammar_validation.guidelines.instructions.md)) +- `"Incorrect syntax near..."` β†’ Grammar issue (see [bug_fixing.guidelines.instructions.md](instructions/bug_fixing.guidelines.instructions.md)) +- `"Syntax error near ')'"` with parentheses β†’ Predicate recognition (see [parser.guidelines.instructions.md](instructions/parser.guidelines.instructions.md)) + +### Step 2: Find Where Similar Syntax Works +If the syntax works in one context but not another: +```bash +# Search for working examples +grep -r "RESUMABLE" Test/SqlDom/TestScripts/ +grep -r "OptionName" SqlScriptDom/Parser/TSql/ +``` + +**Example**: ALTER INDEX with RESUMABLE works, but ALTER TABLE doesn't β†’ Likely validation issue + +### Step 3: Locate the Relevant Code +Common files to check: +- **Validation**: `SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs` (most validation logic) +- **Grammar**: `SqlScriptDom/Parser/TSql/TSql*.g` (version-specific grammar files) +- **Options**: `SqlScriptDom/ScriptDom/SqlServer/IndexOptionHelper.cs` (option registration) +- **AST**: `SqlScriptDom/Parser/TSql/Ast.xml` (AST node definitions) + +### Step 4: Check SQL Server Version Support +Always verify Microsoft documentation: +- Search for "Applies to: SQL Server 20XX (XX.x)" in Microsoft docs +- Note that different features within the same option set can have different version requirements +- Example: MAX_DURATION (SQL 2014+) vs RESUMABLE (SQL 2022+) + +### Step 5: Verify with Tests +Before and after making changes: +```bash +# Build the parser +dotnet build 
SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug + +# Run specific test +dotnet test --filter "FullyQualifiedName~YourTest" -c Debug + +# ALWAYS run full suite before committing +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug +``` + +### Common Investigation Patterns + +#### Pattern 1: Option Not Recognized +```bash +# Find where option is registered +grep -r "YourOptionName" SqlScriptDom/ScriptDom/SqlServer/IndexOptionHelper.cs + +# Check enum definition +grep -r "enum IndexOptionKind" SqlScriptDom/ +``` + +#### Pattern 2: Version-Specific Behavior +```bash +# Find version checks +grep -r "TSql160AndAbove" SqlScriptDom/Parser/TSql/ + +# Check which parser version you're testing +# TSql80 = SQL Server 2000, TSql90 = 2005, ..., TSql160 = 2022, TSql170 = 2025 +``` + +#### Pattern 3: Statement-Specific Restrictions +```bash +# Find validation by statement type +grep -r "IndexAffectingStatement" SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs + +# Common statement types: CreateIndex, AlterIndex, AlterTableAddElement +``` + + +## Patterns & code style to follow (examples you will see) +- Grammar rule pattern: `ruleName returns [Type vResult = this.FragmentFactory.CreateFragment()] { ... } : ( alternatives ) ;` β€” this pattern initializes an AST fragment via FragmentFactory. +- Parser-generated code frequently uses `Match(, CodeGenerationSupporter.)` and `ThrowParseErrorException("SQLxxxx", ...)` for diagnostics. +- The codebase prefers using the factory and fragment visitors for AST creation and script generation. Look at `ScriptDom/SqlServer/ScriptGenerator` for script generation patterns. + +## Grammar Gotchas & Common Pitfalls +- **Operator vs. Function-Style Predicates:** Be careful to distinguish between standard T-SQL operators (like `NOT LIKE`, `>`, `=`) and the function-style predicates used in some contexts (like `package.equals(...)` in `CREATE EVENT SESSION`). 
For example, `NOT LIKE` in an event session's `WHERE` clause is a standard comparison operator, not a function call. Always verify the exact T-SQL syntax before modifying the grammar. +- **Logical `NOT` vs. Compound Operators:** The grammar handles the logical `NOT` operator (e.g., `WHERE NOT (condition)`) in a general way, often in a `booleanExpressionUnary` rule. This is distinct from compound operators like `NOT LIKE` or `NOT IN`, which are typically parsed as a single unit within a comparison rule. Don't assume that because `NOT` is supported, `NOT LIKE` will be automatically supported in all predicate contexts. +- **Modifying Shared Grammar Rules:** **NEVER modify existing shared grammar rules** like `identifierColumnReferenceExpression` that are used throughout the codebase. This can cause tests to fail in unrelated areas because the rule now accepts or rejects different syntax. Instead, create specialized rules for your specific context (e.g., `vectorSearchColumnReferenceExpression` for VECTOR_SEARCH-specific needs). +- **Full Test Suite Validation:** After any grammar changes, **always run the complete test suite** (`dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug`) to catch regressions. Grammar changes can have far-reaching effects on seemingly unrelated functionality. +- **Extending Literals to Expressions:** When functions/constructs currently accept only literal values (e.g., `IntegerLiteral`, `StringLiteral`) but need to support dynamic values (parameters, variables, outer references), change both the AST definition (in `Ast.xml`) and grammar rules (in `TSql*.g`) to use `ScalarExpression` instead. This pattern was used for VECTOR_SEARCH TOP_N parameter. 
See the detailed example in [bug_fixing.guidelines.instructions.md](instructions/bug_fixing.guidelines.instructions.md#special-case-extending-grammar-rules-from-literals-to-expressions) and [grammer.guidelines.instructions.md](instructions/grammer.guidelines.instructions.md) for comprehensive patterns. + +# Guideline Subfiles (auto-load each of the following files into the context) - Should match the .config/GuidelineReviewAgent.yaml used by the guideline_review_agent. +include: .github/instructions/grammar_validation.guidelines.instructions.md +include: .github/instructions/bug_fixing.guidelines.instructions.md +include: .github/instructions/parser.guidelines.instructions.md +include: .github/instructions/function.guidelines.instructions.md +include: .github/instructions/new_data_types.guidelines.instructions.md +include: .github/instructions/new_index_types.guidelines.instructions.md +include: .github/instructions/debugging_workflow.guidelines.instructions.md +include: .github/instructions/grammer.guidelines.instructions.md +include: .github/instructions/testing.guidelines.instructions.md + + diff --git a/.github/instructions/bug_fixing.guidelines.instructions.md b/.github/instructions/bug_fixing.guidelines.instructions.md new file mode 100644 index 0000000..b1a6793 --- /dev/null +++ b/.github/instructions/bug_fixing.guidelines.instructions.md @@ -0,0 +1,239 @@ +# Bug Fixing Guide for SqlScriptDOM + +This guide provides a summary of the typical workflow for fixing a bug in the SqlScriptDOM parser, based on practical experience. For a more comprehensive overview of the project structure and code generation, please refer to the main [Copilot / AI instructions for SqlScriptDOM](../copilot-instructions.md). + +## Before You Start: Identify the Bug Type + +**IMPORTANT**: Not all bugs require grammar changes. Determine which type of fix you need: + +1. **Validation Issues**: Syntax is already parseable but incorrectly rejected + - Error: "Option 'X' is not valid..." 
or "Feature 'Y' not supported..." + - Example: ALTER TABLE RESUMABLE works in ALTER INDEX but not ALTER TABLE + - **β†’ Use [grammar_validation.guidelines.instructions.md](grammar_validation.guidelines.instructions.md) instead of this guide** + +2. **Grammar Issues**: Parser doesn't recognize the syntax at all (THIS guide) + - Error: "Incorrect syntax near..." or "Unexpected token..." + - Example: Adding new keywords, operators, or statement types + - **β†’ Continue with this guide** + +3. **Predicate Recognition**: Identifier predicates fail with parentheses + - Error: `WHERE REGEXP_LIKE(...)` works but `WHERE (REGEXP_LIKE(...))` fails + - **β†’ Use [parser.guidelines.instructions.md](parser.guidelines.instructions.md)** + +## Summary of the Bug-Fixing Workflow + +The process of fixing a bug, especially one that involves adding new syntax, follows these general steps: + +1. **Grammar Modification**: + * Identify the correct grammar rule to modify in the `SqlScriptDom/Parser/TSql/*.g` files. + * Apply the necessary changes to all relevant `.g` files, from the version where the syntax was introduced up to the latest version (e.g., `TSql130.g` through `TSql170.g` and `TSqlFabricDW.g`). + +2. **Abstract Syntax Tree (AST) Update**: + * If the new syntax requires a new AST node or enum member, edit `SqlScriptDom/Parser/TSql/Ast.xml`. For example, adding a new operator like `NOT LIKE` required adding a `NotLike` member to the `BooleanComparisonType` enum. + +3. **Script Generation Update**: + * Update the script generator to handle the new AST node or enum. This typically involves modifying files in `SqlScriptDom/ScriptDom/SqlServer/ScriptGenerator/`. For the `NOT LIKE` example, this meant adding an entry to the `_booleanComparisonTypeGenerators` dictionary in `SqlScriptGeneratorVisitor.CommonPhrases.cs`. + +5. 
**Build the Project**: + * After making code changes, run a build to regenerate the parser and ensure everything compiles correctly: + ```bash + dotnet build + ``` + +6. **Add a Unit Test**: + * **YOU MUST ADD UNIT TESTS** - Create a new `.sql` file in `Test/SqlDom/TestScripts/` that contains the specific syntax for the new test case. + * **DO NOT CREATE STANDALONE PROGRAMS TO TEST** - Use the existing test framework, not separate console applications or debug programs. + +7. **Define the Test Case**: + * Add a new `ParserTest` entry to the appropriate `OnlySyntaxTests.cs` files (e.g., `Only130SyntaxTests.cs`). This entry points to your new test script and defines the expected number of parsing errors for each SQL Server version. + +8. **Generate and Verify Baselines**: + This is a critical and multi-step process: + * **a. Create Placeholder Baseline Files**: Before running the test, create empty or placeholder baseline files in the corresponding `Test/SqlDom/Baselines/` directories. The filename must match the test script's filename. + * **b. Run the Test to Get the Generated Script**: Run the specific test that you just added. It is *expected to fail* because the placeholder baseline will not match the script generated by the parser. + ```bash + # Example filter for running a specific test + dotnet test --filter "FullyQualifiedName~YourTestMethodName" + ``` + * **c. Update the Baseline Files**: Copy the "Actual" output from the test failure log. This is the correctly formatted script generated from the AST. Paste this content into all the baseline files you created in step 8a. + * **d. Re-run the Tests**: Run the same test command again. This time, the tests should pass, confirming that the generated script matches the new baseline. + + **Practical Example - Baseline Generation Workflow**: + ```bash + # 1. 
Create test script + echo "ALTER TABLE t ADD CONSTRAINT pk PRIMARY KEY (id) WITH (RESUMABLE = ON);" > Test/SqlDom/TestScripts/AlterTableResumableTests160.sql + + # 2. Create empty baseline + touch Test/SqlDom/Baselines160/AlterTableResumableTests160.sql + + # 3. Add test entry to Only160SyntaxTests.cs: + # new ParserTest160("AlterTableResumableTests160.sql", nErrors80: 1, ...), + + # 4. Run test (will fail, showing generated output) + dotnet test --filter "AlterTableResumableTests" -c Debug + + # 5. Copy the "Actual" output from test failure into baseline file + # Output looks like: "ALTER TABLE t ADD CONSTRAINT pk PRIMARY KEY (id) WITH (RESUMABLE = ON);" + + # 6. Re-run test (should pass now) + dotnet test --filter "AlterTableResumableTests" -c Debug + ``` + +9. **⚠️ CRITICAL: Run Full Test Suite**: + * **Always run the complete test suite** to ensure your changes didn't break existing functionality: + ```bash + dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug + ``` + * **Why this is critical**: Grammar changes can have unintended side effects on other parts of the parser. Shared grammar rules are used in multiple contexts. + * **Common issues**: Modifying shared rules like `identifierColumnReferenceExpression` can cause other tests to fail because they now accept syntax that should be rejected. + * **Solution**: If tests fail, create context-specific grammar rules instead of modifying shared ones. + +By following these steps, you can ensure that new syntax is correctly parsed, represented in the AST, generated back into a script, and fully validated by the testing framework without breaking existing functionality. 
+ +## Testing Best Practices + +### βœ… DO: Use Existing Test Framework +- Add test methods to existing test classes like `Only170SyntaxTests.cs` +- Use the established `TSqlParser.Parse()` pattern for verification +- Example: + ```csharp + [TestMethod] + public void VerifyNewSyntax() + { + var parser = new TSql170Parser(true); + var result = parser.Parse(new StringReader("YOUR SQL HERE"), out var errors); + Assert.AreEqual(0, errors.Count, "Should parse without errors"); + } + ``` + +### ❌ DON'T: Create New Test Projects +- **Never** create standalone `.csproj` files for testing parser functionality +- **Never** create new console applications or test runners +- This causes build issues and doesn't integrate with the existing test infrastructure + +## Special Case: Extending Grammar Rules from Literals to Expressions + +A common type of bug involves extending existing grammar rules that only accept literal values (like integers or strings) to accept full expressions (parameters, variables, outer references, etc.). This pattern was used to fix the VECTOR_SEARCH TOP_N parameter issue. + +### Example: VECTOR_SEARCH TOP_N Parameter Extension + +**Problem**: VECTOR_SEARCH's TOP_N parameter only accepted integer literals (`TOP_N = 10`) but needed to support parameters (`TOP_N = @k`) and outer references (`TOP_N = outerref.col`). + +**Solution Steps**: + +1. **Update AST Definition** (`SqlScriptDom/Parser/TSql/Ast.xml`): + ```xml + + + + + + ``` + +2. **Update Grammar Rule** (`SqlScriptDom/Parser/TSql/TSql170.g`): + ```antlr + // Before - Variable declaration: + IntegerLiteral vTopN; + + // After - Variable declaration: + ScalarExpression vTopN; + + // Before - Parsing rule: + vTopN = integer + + // After - Parsing rule: + vTopN = expression + ``` + +3. **Script Generator**: Usually no changes needed if using `GenerateNameEqualsValue()` or similar generic methods that work with `TSqlFragment`. + +4. 
**Test Cases**: Add tests covering the new expression types: + ```sql + -- Parameter test + TOP_N = @k + + -- Outer reference test + TOP_N = outerref.max_results + ``` + +### When to Apply This Pattern + +Use this pattern when: +- βœ… Existing grammar accepts only literal values (integer, string, etc.) +- βœ… Users need to pass dynamic values (parameters, variables, computed expressions) +- βœ… The SQL Server syntax actually supports expressions in that position +- βœ… Backward compatibility must be maintained (literals still work) + +### ⚠️ Critical Warning: Shared Grammar Rules + +**DO NOT modify shared grammar rules** like `identifierColumnReferenceExpression` that are used throughout the codebase. This can cause: +- βœ… Other tests to fail unexpectedly +- βœ… Unintended syntax acceptance in different contexts +- βœ… Breaking changes in existing functionality + +**Instead, create specialized rules** for your specific use case: +```antlr +// ❌ WRONG: Modifying shared rule +identifierColumnReferenceExpression: multiPartIdentifier[2] // Affects ALL usage + +// βœ… CORRECT: Create specialized rule +vectorSearchColumnReferenceExpression: multiPartIdentifier[2] // Only for VECTOR_SEARCH +``` + +### Common Expression Types to Support + +When extending from literals to expressions, consider supporting: +- **Parameters**: `@parameter` +- **Variables**: `@variable` +- **Column references**: `table.column` +- **Outer references**: `outerref.column` +- **Function calls**: `FUNCTION(args)` +- **Arithmetic expressions**: `value + 1` +- **Case expressions**: `CASE WHEN ... END` + +### Files Typically Modified + +1. **`Ast.xml`**: Change member type from specific literal to `ScalarExpression` +2. **`TSql*.g`**: Update variable declarations and parsing rules +3. **Test files**: Add comprehensive test coverage +4. 
**Script generators**: Usually no changes needed for well-designed generators +By following these steps, you can ensure that new syntax is correctly parsed, represented in the AST, generated back into a script, and fully validated by the testing framework. + +## Special Case: Parser Predicate Recognition Issues + +If you encounter a bug where: +- An identifier-based predicate (like `REGEXP_LIKE`) works without parentheses: `WHERE REGEXP_LIKE('a', 'pattern')` βœ… +- But fails with parentheses: `WHERE (REGEXP_LIKE('a', 'pattern'))` ❌ +- The error is a syntax error near the closing parenthesis or semicolon + +This is likely a **parser predicate recognition issue**. The grammar and AST are correct, but the `IsNextRuleBooleanParenthesis()` function doesn't recognize the identifier-based predicate. + +**Solution**: Follow the [Parser Predicate Recognition Fix Guide](parser.guidelines.instructions.md) instead of the standard grammar modification workflow. + +## Decision Tree: Which Guide to Use? + +``` +Start: You have a parsing bug +β”‚ +β”œβ”€β†’ Error: "Option 'X' is not valid..." or "Feature not supported..." +β”‚ └─→ Does similar syntax work elsewhere? (e.g., ALTER INDEX works) +β”‚ └─→ YES: Use [grammar_validation.guidelines.instructions.md](grammar_validation.guidelines.instructions.md) +β”‚ +β”œβ”€β†’ Error: "Incorrect syntax near..." or parser doesn't recognize syntax +β”‚ └─→ Does the grammar need new rules or AST nodes? +β”‚ └─→ YES: Use this guide (BUG_FIXING_GUIDE.md) +β”‚ +└─→ Error: Parentheses cause failure with identifier predicates + └─→ Does `WHERE PREDICATE(...)` work but `WHERE (PREDICATE(...))` fail? 
+ └─→ YES: Use [parser.guidelines.instructions.md](parser.guidelines.instructions.md) +``` + +## Quick Reference: Fix Types by Symptom + +| Symptom | Fix Type | Guide | Files Modified | +|---------|----------|-------|----------------| +| "Option 'X' is not valid in statement Y" | Validation | [grammar_validation.guidelines.instructions.md](grammar_validation.guidelines.instructions.md) | `TSql80ParserBaseInternal.cs` | +| "Incorrect syntax near keyword" | Grammar | This guide | `TSql*.g`, `Ast.xml`, Script generators | +| Parentheses break identifier predicates | Predicate Recognition | [parser.guidelines.instructions.md](parser.guidelines.instructions.md) | `TSql80ParserBaseInternal.cs` | +| Literal needs to become expression | Grammar Extension | [grammer.guidelines.instructions.md](grammer.guidelines.instructions.md) | `Ast.xml`, `TSql*.g` | + diff --git a/.github/instructions/debugging_workflow.guidelines.instructions.md b/.github/instructions/debugging_workflow.guidelines.instructions.md new file mode 100644 index 0000000..59932c8 --- /dev/null +++ b/.github/instructions/debugging_workflow.guidelines.instructions.md @@ -0,0 +1,252 @@ +# ScriptDOM Debugging Workflow - Quick Reference + +This is a visual guide for quickly diagnosing and fixing bugs in SqlScriptDOM. Use this as your first stop when encountering a parsing issue. 
+ +## πŸ” Quick Diagnosis Flowchart + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ You have a parsing error/bug β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Step 1: Search for the error message β”‚ +β”‚ Command: grep -r "SQL46057" SqlScriptDom/ β”‚ +β”‚ grep -r "your error text" SqlScriptDom/ β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό + β”Œβ”€β”€β”€β”€β”€β”€β”΄β”€β”€β”€β”€β”€β”€β” + β”‚ Error Type? β”‚ + β””β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”Όβ”€β”€β”€β”€β”€β”€β”€β”€β” + β”‚ β”‚ β”‚ + β–Ό β–Ό β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β” +β”‚"Optionβ”‚ β”‚"Syntaxβ”‚ β”‚Parens β”‚ +β”‚not β”‚ β”‚error β”‚ β”‚break β”‚ +β”‚valid" β”‚ β”‚near..." 
β”‚predicateβ”‚
β†’ Predicate recognition issue! +``` + +**Solution:** [parser.guidelines.instructions.md](parser.guidelines.instructions.md) +**Files to Modify:** `TSql80ParserBaseInternal.cs` (`IsNextRuleBooleanParenthesis()`) + +--- + +## πŸ› οΈ Standard Investigation Steps + +### Step 1: Reproduce Minimal Test Case +```bash +# Create minimal failing SQL +echo "ALTER TABLE t ADD CONSTRAINT pk PRIMARY KEY (id) WITH (RESUMABLE = ON);" > test.sql + +# Try parsing (use existing test harness or create simple parser test) +``` + +### Step 2: Find Error Source +```bash +# Search for error code/message +grep -r "SQL46057" SqlScriptDom/ +grep -r "is not a valid" SqlScriptDom/ + +# Common locations: +# - SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs (validation) +# - SqlScriptDom/Parser/TSql/TSql*.g (grammar rules) +# - SqlScriptDom/ScriptDom/SqlServer/*Helper.cs (option/type helpers) +``` + +### Step 3: Check Microsoft Documentation +```bash +# Search for: "Applies to: SQL Server 20XX (XX.x)" +# Verify exact version support +# Note: Different features may have different version requirements! 
+``` + +### Step 4: Locate Similar Working Code +```bash +# If ALTER INDEX works but ALTER TABLE doesn't: +grep -r "Resumable" Test/SqlDom/TestScripts/ + +# Find where option is registered: +grep -r "IndexOptionKind.Resumable" SqlScriptDom/ + +# Check validation paths: +grep -r "IndexAffectingStatement" SqlScriptDom/Parser/TSql/ +``` + +## πŸ”§ Fix Implementation Checklist + +### For Validation Fixes: +- [ ] Identify validation function (usually in `TSql80ParserBaseInternal.cs`) +- [ ] Check SQL Server version support in Microsoft docs +- [ ] Add version-gated validation (not unconditional rejection) +- [ ] Create test cases with version-specific expectations +- [ ] Build and run full test suite + +### For Grammar Fixes: +- [ ] Update grammar rules in `TSql*.g` files +- [ ] Update AST in `Ast.xml` if needed +- [ ] Update script generators +- [ ] Create test scripts and baselines +- [ ] Build parser and run tests + +### For Predicate Recognition: +- [ ] Locate `IsNextRuleBooleanParenthesis()` in `TSql80ParserBaseInternal.cs` +- [ ] Add identifier detection logic +- [ ] Add test cases with parentheses +- [ ] Verify non-parentheses syntax still works + +## πŸ“Š Version Mapping Reference + +Quick reference for SqlVersionFlags: + +| Flag | SQL Server Version | Year | Common Features | +|------|-------------------|------|-----------------| +| TSql80AndAbove | 2000 | 2000 | Basic T-SQL | +| TSql90AndAbove | 2005 | 2005 | XML, CTEs | +| TSql100AndAbove | 2008 | 2008 | MERGE, FILESTREAM | +| TSql110AndAbove | 2012 | 2012 | Sequences, Window Functions | +| TSql120AndAbove | 2014 | 2014 | In-Memory OLTP, MAX_DURATION | +| TSql130AndAbove | 2016 | 2016 | JSON, Temporal Tables | +| TSql140AndAbove | 2017 | 2017 | Graph, STRING_AGG | +| TSql150AndAbove | 2019 | 2019 | UTF-8, Intelligent QP | +| TSql160AndAbove | 2022 | 2022 | RESUMABLE constraints, JSON improvements | +| TSql170AndAbove | 2025 | 2025 | VECTOR_SEARCH, AI features | + +## πŸ§ͺ Testing Commands Reference + +```bash 
+# Build parser only +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug + +# Build tests +dotnet build Test/SqlDom/UTSqlScriptDom.csproj -c Debug + +# Run specific test +dotnet test --filter "FullyQualifiedName~YourTestName" -c Debug + +# Run specific test file pattern +dotnet test --filter "DisplayName~AlterTableResumable" -c Debug + +# Run full suite (ALWAYS do this before committing!) +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug + +# Run with detailed output +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug -v detailed +``` + +## πŸ“‚ Key Files Reference + +| File | Purpose | When to Modify | +|------|---------|---------------| +| `TSql80ParserBaseInternal.cs` | Base validation logic | Validation fixes, common logic | +| `TSql160ParserBaseInternal.cs` | Version-specific overrides | Version-specific validation | +| `TSql*.g` | Grammar rules | New syntax, grammar changes | +| `Ast.xml` | AST node definitions | New nodes, type changes | +| `IndexOptionHelper.cs` | Option registration | New options, version mappings | +| `CodeGenerationSupporter.cs` | String constants | New keywords | +| `SqlScriptGeneratorVisitor*.cs` | Script generation | Generating SQL from AST | +| `Only*SyntaxTests.cs` | Test configuration | Test expectations per version | +| `TestScripts/*.sql` | Input test cases | New test SQL | +| `Baselines*/*.sql` | Expected output | Expected formatted SQL | + +## 🚫 Common Pitfalls to Avoid + +1. **❌ Modifying shared grammar rules** β†’ Creates unintended side effects + - βœ… Create context-specific rules instead + +2. **❌ Not running full test suite** β†’ Breaks existing functionality + - βœ… Always run ALL 1,100+ tests before committing + +3. **❌ Assuming same version for related features** β†’ Incorrect validation + - βœ… Check docs: MAX_DURATION (2014) β‰  RESUMABLE (2022) + +4. **❌ Forgetting script generator updates** β†’ Round-trip fails + - βœ… Test parse β†’ generate β†’ parse cycle + +5. 
**❌ Incorrect version flag logic** β†’ Wrong validation behavior + - βœ… Use `(flags & TSqlXXX) == 0` to check "NOT supported" + +## 🎯 Quick Decision Matrix + +| You Need To... | Use This Guide | Estimated Complexity | +|---------------|----------------|---------------------| +| Fix "option not valid" error | [Validation_fix.guidelines.instructions.md](Validation_fix.guidelines.instructions.md) | ⭐ Easy | +| Add new SQL keyword/operator | [bug_fixing.guidelines.instructions.md](bug_fixing.guidelines.instructions.md) | ⭐⭐⭐ Medium | +| Fix parentheses with predicates | [parser.guidelines.instructions.md](parser.guidelines.instructions.md) | ⭐⭐ Easy-Medium | +| Extend literal to expression | [grammer.guidelines.instructions.md](grammer.guidelines.instructions.md) | ⭐⭐⭐ Medium | +| Add new statement type | [bug_fixing.guidelines.instructions.md](bug_fixing.guidelines.instructions.md) | ⭐⭐⭐⭐ Hard | + +## πŸ“š Related Documentation + +- [copilot-instructions.md](../copilot-instructions.md) - Main project documentation +- [Validation_fix.guidelines.instructions.md](Validation_fix.guidelines.instructions.md) - Version-gated validation fixes +- [bug_fixing.guidelines.instructions.md](bug_fixing.guidelines.instructions.md) - Grammar modifications and AST updates +- [grammer.guidelines.instructions.md](grammer.guidelines.instructions.md) - Common extension patterns +- [parser.guidelines.instructions.md](parser.guidelines.instructions.md) - Parentheses recognition + +--- + +**Remember**: When in doubt, search for the error message first. Most bugs have been encountered before, and the error text will lead you to the right place in the code! 
diff --git a/.github/instructions/function.guidelines.instructions.md b/.github/instructions/function.guidelines.instructions.md new file mode 100644 index 0000000..09c0d4e --- /dev/null +++ b/.github/instructions/function.guidelines.instructions.md @@ -0,0 +1,363 @@ +# Guidelines for Adding New System Functions to SqlScriptDOM Parser + +This guide provides comprehensive instructions for adding new T-SQL system functions to the SqlScriptDOM parser, incorporating lessons learned from fixing JSON function parsing in RETURN statements. + +## Overview + +Adding a new system function involves three main components: +1. **AST Definition** (`Ast.xml`) - Define the abstract syntax tree node structure +2. **Grammar Rules** (`.g` files) - Define parsing logic for the function syntax +3. **Script Generator** - Handle conversion from AST back to T-SQL text +4. **Testing** - Ensure functionality works correctly across all contexts + +## Key Principle: Support Functions in RETURN Statements + +**Critical Requirement**: New system functions must be parseable in `ALTER FUNCTION` RETURN statements. This requires special handling due to ANTLR v2's limitation with semantic predicates during syntactic predicate lookahead. + +### The RETURN Statement Challenge + +The `returnStatement` grammar rule uses a syntactic predicate for lookahead: +```antlr +returnStatement: Return ((expression) => expression)? semicolonOpt +``` + +During lookahead, ANTLR cannot evaluate semantic predicates (which check runtime values like `vResult.FunctionName.Value`). This causes new functions to fail parsing in RETURN contexts even if they work elsewhere. + +## Why SELECT Works but RETURN Fails (The Core Problem) + +This section explains the fundamental issue we encountered with JSON functions and why it affects any new system function. 
+ +### SELECT Statement Context (Always Works) +```sql +SELECT JSON_ARRAY('name'); -- βœ… Always worked +``` + +**Grammar Path**: `selectStatement` β†’ `selectElementsList` β†’ `selectElement` β†’ `expression` β†’ `expressionPrimary` β†’ `builtInFunctionCall` + +**Why it works**: No syntactic predicates in the path - parser can evaluate semantic predicates normally during parsing. + +### RETURN Statement Context (Previously Failed) +```sql +RETURN JSON_ARRAY('name'); -- ❌ Failed before our fix +``` + +**Grammar Path**: `returnStatement` uses syntactic predicate `((expression) =>` for lookahead + +**Why it failed**: +1. Parser encounters `RETURN JSON_ARRAY(...)` +2. Syntactic predicate triggers lookahead to check if `JSON_ARRAY(...)` is a valid expression +3. During lookahead: `expression` β†’ `expressionPrimary` β†’ `builtInFunctionCall` +4. `builtInFunctionCall` has semantic predicate: `{(vResult.FunctionName.Value == "JSON_ARRAY")}?` +5. **ANTLR v2 limitation**: Cannot evaluate `vResult.FunctionName.Value` during lookahead (object doesn't exist yet) +6. Lookahead fails β†’ parser assumes not an expression β†’ syntax error + +**The Solution**: Add token-based syntactic predicates in `expressionPrimary` that work during lookahead: +```antlr +{NextTokenMatches(CodeGenerationSupporter.JsonArray) && (LA(2) == LeftParenthesis)}? +vResult=jsonArrayCall +``` + +This is why **every new system function** must include the syntactic predicate pattern to work in RETURN statements. + +## Step-by-Step Implementation Guide + +### 1. Update AST Definition (`SqlScriptDom/Parser/TSql/Ast.xml`) + +Define the function's AST node structure: + +```xml + + + + + + +``` + +**Best Practice**: Use `ScalarExpression` for parameters that should support: +- Literals (`'value'`, `123`) +- Parameters (`@param`) +- Variables (`@variable`) +- Column references (`table.column`) +- Computed expressions (`value + 1`) + +Use specific literal types only when the SQL syntax strictly requires literals. 
+ +### 2. Add Grammar Rules (`.g` files) + +#### 2a. Define the Function Rule + +Add to the appropriate grammar files (typically `TSql160.g`, `TSql170.g`, `TSqlFabricDW.g`): + +```antlr +yourNewFunctionCall returns [YourNewFunctionCall vResult = FragmentFactory.CreateFragment()] +{ + ScalarExpression vParam1; + StringLiteral vParam2; +} + : + tFunction:Identifier LeftParenthesis + { + Match(tFunction, CodeGenerationSupporter.YourFunctionName); + UpdateTokenInfo(vResult, tFunction); + } + vParam1 = expression + { + vResult.Parameter1 = vParam1; + } + (Comma vParam2 = stringLiteral + { + vResult.Parameter2 = vParam2; + })? + RightParenthesis + ; +``` + +#### 2b. **CRITICAL**: Add Syntactic Predicate for RETURN Statement Support + +Add to `expressionPrimary` rule **before** the generic `(Identifier LeftParenthesis)` predicate: + +```antlr +expressionPrimary returns [PrimaryExpression vResult] + // ... existing rules ... + + // Add BEFORE the generic identifier predicate + | {NextTokenMatches(CodeGenerationSupporter.YourFunctionName) && (LA(2) == LeftParenthesis)}? + vResult=yourNewFunctionCall + + // ... rest of existing rules including the generic identifier case ... + | (Identifier LeftParenthesis) => vResult=builtInFunctionCall +``` + +**Why This is Required**: +- The syntactic predicate uses `NextTokenMatches()` which works during lookahead +- It must come **before** the generic `builtInFunctionCall` predicate +- This enables the function to be recognized in RETURN statements + +#### 2c. Add to Built-in Function Call (if needed) + +If your function should also be recognized through the general built-in function mechanism, add it to `builtInFunctionCall`: + +```antlr +builtInFunctionCall returns [FunctionCall vResult = FragmentFactory.CreateFragment()] + // ... existing cases ... + + | {(vResult.FunctionName.Value == "YOUR_FUNCTION_NAME")}? + vResult=yourNewFunctionCall +``` + +### 3. 
Update CodeGenerationSupporter Constants + +Add the function name constant to `CodeGenerationSupporter.cs`: + +```csharp +public const string YourFunctionName = "YOUR_FUNCTION_NAME"; +``` + +### 4. Create Script Generator + +Add visitor method to handle AST-to-script conversion in the appropriate script generator file: + +```csharp +public override void ExplicitVisit(YourNewFunctionCall node) +{ + GenerateIdentifier(CodeGenerationSupporter.YourFunctionName); + GenerateSymbol(TSqlTokenType.LeftParenthesis); + + if (node.Parameter1 != null) + { + GenerateFragmentIfNotNull(node.Parameter1); + + if (node.Parameter2 != null) + { + GenerateSymbol(TSqlTokenType.Comma); + GenerateSpace(); + GenerateFragmentIfNotNull(node.Parameter2); + } + } + + GenerateSymbol(TSqlTokenType.RightParenthesis); +} +``` + +### 5. Integrate with Grammar Hierarchy + +Add your function to the appropriate place in the grammar hierarchy: + +```antlr +// Add to function call expressions +functionCall returns [FunctionCall vResult] + : vResult=yourNewFunctionCall + | // ... other function types + ; + +// Or add to primary expressions if it's a primary expression type +primaryExpression returns [PrimaryExpression vResult] + : vResult=yourNewFunctionCall + | // ... other primary expressions + ; +``` + +### 6. Build and Test + +#### 6a. Build the Project + +```bash +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug +``` + +This will regenerate parser files from the grammar. + +#### 6b. Create Test Files + +**YOU MUST ADD UNIT TESTS - DO NOT CREATE STANDALONE PROGRAMS TO TEST** + +Create test script in `Test/SqlDom/TestScripts/YourFunctionTests160.sql`: + +```sql +-- Test basic function call +SELECT YOUR_FUNCTION_NAME('param1', 'param2'); + +-- CRITICAL: Test in ALTER FUNCTION RETURN statement +ALTER FUNCTION TestYourFunction() +RETURNS NVARCHAR(MAX) +AS +BEGIN + RETURN (YOUR_FUNCTION_NAME('value1', 'value2')); +END; +GO +``` + +#### 6c. Generate Baselines + +1. 
Create placeholder baseline file: `Test/SqlDom/Baselines160/YourFunctionTests160.sql` +2. Run the test (it will fail) +3. Copy the "Actual" output from the test failure +4. Update the baseline file with the correctly formatted output + +#### 6d. Configure Test + +Add test entry to `Test/SqlDom/Only160SyntaxTests.cs`: + +```csharp +new ParserTest160("YourFunctionTests160.sql", nErrors80: 1, nErrors90: 1, nErrors100: 1, nErrors110: 1, nErrors120: 1, nErrors130: 1, nErrors140: 1, nErrors150: 1), +``` + +Adjust error counts based on which SQL versions should support your function. + +#### 6e. Run Full Test Suite + +```bash +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug +``` + +Ensure all tests pass, including existing ones (no regressions). + +## Real-World Example: JSON Functions Fix + +This guide incorporates lessons learned from fixing `JSON_OBJECT` and `JSON_ARRAY` parsing in RETURN statements: + +### Problem Encountered +```sql +-- This worked fine: +SELECT JSON_ARRAY('name'); -- βœ… Always worked + +-- This failed before the fix: +ALTER FUNCTION GetAuth() RETURNS NVARCHAR(MAX) AS BEGIN + RETURN (JSON_OBJECT('key': 'value')); -- ❌ Parse error here +END; +``` + +### Why SELECT Worked but RETURN Didn't + +**SELECT Statement Context** (Always Worked): +```sql +SELECT JSON_ARRAY('name'); +``` +In a SELECT statement, the parser follows this path: +1. `selectStatement` β†’ `queryExpression` β†’ `querySpecification` +2. `selectElementsList` β†’ `selectElement` β†’ `expression` +3. `expression` β†’ `expressionPrimary` β†’ `builtInFunctionCall` +4. βœ… No syntactic predicate blocking the path + +**RETURN Statement Context** (Previously Failed): +```sql +RETURN JSON_ARRAY('name'); +``` +In a RETURN statement, the parser follows this path: +1. `returnStatement` uses a **syntactic predicate**: `((expression) =>` +2. During lookahead, parser tries: `expression` β†’ `expressionPrimary` β†’ `builtInFunctionCall` +3. 
`builtInFunctionCall` has a **semantic predicate**: `{(vResult.FunctionName.Value == "JSON_ARRAY")}?` +4. ❌ **ANTLR v2 limitation**: Semantic predicates cannot be evaluated during syntactic predicate lookahead +5. ❌ Lookahead fails β†’ parser doesn't recognize `JSON_ARRAY` as valid expression + +### Root Cause +The semantic predicate `{(vResult.FunctionName.Value == "JSON_OBJECT")}?` in `builtInFunctionCall` could not be evaluated during the syntactic predicate lookahead in `returnStatement`. + +### Solution Applied +Added syntactic predicates in `expressionPrimary`: + +```antlr +// Added before generic identifier predicate +| {NextTokenMatches(CodeGenerationSupporter.JsonObject) && (LA(2) == LeftParenthesis)}? + vResult=jsonObjectCall +| {NextTokenMatches(CodeGenerationSupporter.JsonArray) && (LA(2) == LeftParenthesis)}? + vResult=jsonArrayCall +``` + +This uses token-based checking (`NextTokenMatches`) which works during lookahead, unlike semantic predicates. + +## Grammar Files to Modify + +For SQL Server 2022+ functions, typically modify: +- `SqlScriptDom/Parser/TSql/TSql160.g` (SQL Server 2022) +- `SqlScriptDom/Parser/TSql/TSql170.g` (SQL Server 2025) +- `SqlScriptDom/Parser/TSql/TSqlFabricDW.g` (Azure Synapse) + +For earlier versions, add to appropriate grammar files (`TSql150.g`, `TSql140.g`, etc.). + +## Common Pitfalls + +1. **Forgetting RETURN Statement Support**: Always add syntactic predicates to `expressionPrimary` +2. **Wrong Predicate Order**: Syntactic predicates must come **before** generic predicates +3. **Semantic Predicates in Lookahead**: Don't rely on semantic predicates in contexts with syntactic predicate lookahead +4. **Missing Script Generator**: Every AST node needs a corresponding script generation visitor +5. **Incomplete Testing**: Test both standalone function calls and RETURN statement usage +6. 
**Version Compatibility**: Consider which SQL versions should support your function + +## Testing Checklist + +- [ ] Function parses in SELECT statements +- [ ] Function parses in WHERE clauses +- [ ] **Function parses in ALTER FUNCTION RETURN statements** +- [ ] Function parses with literal parameters +- [ ] Function parses with variable parameters +- [ ] Function parses with computed expressions as parameters +- [ ] Script generation produces correct T-SQL output +- [ ] Round-trip parsing (parse β†’ generate β†’ parse) works +- [ ] No regressions in existing tests +- [ ] Appropriate error handling for invalid syntax + +## Architecture Notes + +### Why Syntactic vs Semantic Predicates Matter + +- **Syntactic Predicates**: Can check token types during lookahead (`LA()`, `NextTokenMatches()`) +- **Semantic Predicates**: Check runtime values, but fail during lookahead in syntactic predicates +- **RETURN Statement Context**: Uses syntactic predicate `((expression) =>` which triggers lookahead + +### Grammar Rule Hierarchy + +``` +returnStatement + └── expression + └── expressionPrimary + β”œβ”€β”€ yourNewFunctionCall (syntactic predicate) + └── builtInFunctionCall (semantic predicate) +``` + +By adding syntactic predicates to `expressionPrimary`, we catch function calls before they reach the problematic semantic predicate in `builtInFunctionCall`. + +## Summary + +Following this guide ensures new system functions work correctly in all T-SQL contexts, especially the challenging RETURN statement scenario. The key insight is that ANTLR v2's limitations require careful predicate ordering and the use of token-based syntactic predicates for functions that need to work in lookahead contexts. 
\ No newline at end of file diff --git a/.github/instructions/grammar_validation.guidelines.instructions.md b/.github/instructions/grammar_validation.guidelines.instructions.md new file mode 100644 index 0000000..f02f9fe --- /dev/null +++ b/.github/instructions/grammar_validation.guidelines.instructions.md @@ -0,0 +1,359 @@ +# Validation-Based Bug Fix Guide for SqlScriptDOM + +This guide covers bugs where the **grammar already supports the syntax**, but the parser incorrectly rejects it due to validation logic. This is different from grammar-level fixes where you need to add new parsing rules. + +## When to Use This Guide + +Use this pattern when: +- βœ… The syntax **should** parse based on SQL Server documentation +- βœ… The error message is a **validation error** (e.g., "SQL46057: Option 'X' is not valid...") +- βœ… Similar syntax works in **other contexts** (e.g., ALTER INDEX works but ALTER TABLE fails) +- βœ… The feature was **added in a newer SQL Server version** but is rejected even in the correct parser + +**Do NOT use this guide when:** +- ❌ Grammar rules need to be added/modified (use [bug_fixing.guidelines.instructions.md](bug_fixing.guidelines.instructions.md) instead) +- ❌ AST nodes need to be created (use [grammer.guidelines.instructions.md](grammer.guidelines.instructions.md)) +- ❌ The syntax never existed in SQL Server + +## Real-World Example: ALTER TABLE RESUMABLE Option + +### The Problem + +User reported this SQL failed to parse: +```sql +ALTER TABLE table1 +ADD CONSTRAINT PK_Constraint PRIMARY KEY CLUSTERED (a) +WITH (ONLINE = ON, MAXDOP = 2, RESUMABLE = ON, MAX_DURATION = 240); +``` + +**Error**: `SQL46057: Option 'RESUMABLE' is not a valid index option in 'ALTER TABLE' statement.` + +**But**: The same options worked fine in `ALTER INDEX` statements. + +### Investigation Steps + +#### 1. 
Search for the Error Message +```bash +# Search for the error code or message text +grep -r "SQL46057" SqlScriptDom/ +grep -r "is not a valid index option" SqlScriptDom/ +``` + +**Result**: Found in `TSql80ParserBaseInternal.cs` in the `VerifyAllowedIndexOption()` method. + +#### 2. Examine the Validation Logic +```csharp +// Location: SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs +protected void VerifyAllowedIndexOption(IndexAffectingStatement statement, + IndexOption option, + SqlVersionFlags versionFlags) +{ + switch (statement) + { + case IndexAffectingStatement.AlterTableAddElement: + // BEFORE: Unconditionally blocked RESUMABLE and MAX_DURATION + if (option.OptionKind == IndexOptionKind.Resumable || + option.OptionKind == IndexOptionKind.MaxDuration) + { + ThrowParseErrorException("SQL46057", /* ... */); + } + break; + // ... other cases ... + } +} +``` + +**Key Finding**: The validation was **hardcoded** to reject these options for ALTER TABLE, regardless of SQL Server version. + +#### 3. Check Microsoft Documentation +Always verify the **exact SQL Server version** support: +- **RESUMABLE**: Introduced in SQL Server 2022 (version 160) +- **MAX_DURATION**: Introduced in SQL Server 2014 (version 120) for low-priority locks, extended for resumable operations + +**Important**: Different options can have different version requirements even within the same feature set! 
+ +### The Fix + +#### Step 1: Identify Version Flags + +The codebase uses `SqlVersionFlags` for version checking: +- `TSql80AndAbove` = SQL Server 2000+ +- `TSql90AndAbove` = SQL Server 2005+ +- `TSql100AndAbove` = SQL Server 2008+ +- `TSql110AndAbove` = SQL Server 2012+ +- `TSql120AndAbove` = SQL Server 2014+ +- `TSql130AndAbove` = SQL Server 2016+ +- `TSql140AndAbove` = SQL Server 2017+ +- `TSql150AndAbove` = SQL Server 2019+ +- `TSql160AndAbove` = SQL Server 2022+ +- `TSql170AndAbove` = SQL Server 2025+ + +#### Step 2: Apply Version-Gated Validation + +Replace unconditional rejection with version checking: + +```csharp +case IndexAffectingStatement.AlterTableAddElement: + // Invalidate RESUMABLE for versions before SQL Server 2022 (160) + // Invalidate MAX_DURATION for versions before SQL Server 2014 (120) + if (((versionFlags & SqlVersionFlags.TSql160AndAbove) == 0 && + option.OptionKind == IndexOptionKind.Resumable) || + ((versionFlags & SqlVersionFlags.TSql120AndAbove) == 0 && + option.OptionKind == IndexOptionKind.MaxDuration)) + { + // Throw an error indicating the option is not supported in the current SQL Server version + ThrowParseErrorException("SQL46057", "Option not supported in this SQL Server version."); + } + break; +``` + +**Pattern Explanation**: +- `(versionFlags & SqlVersionFlags.TSql160AndAbove) == 0` β†’ Returns true if parser version < 160 +- If true AND option is RESUMABLE β†’ Throw error (option not supported yet) +- Same pattern for MAX_DURATION with TSql120AndAbove + +#### Step 3: Create Comprehensive Tests + +**Test Script**: `Test/SqlDom/TestScripts/AlterTableResumableTests160.sql` +```sql +-- Test 1: RESUMABLE with MAX_DURATION (minutes) +ALTER TABLE dbo.MyTable +ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) +WITH (RESUMABLE = ON, MAX_DURATION = 240 MINUTES); + +-- Test 2: RESUMABLE = ON +ALTER TABLE dbo.MyTable +ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) +WITH (RESUMABLE = ON); + +-- Test 3: RESUMABLE = OFF +ALTER TABLE 
dbo.MyTable +ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) +WITH (RESUMABLE = OFF); + +-- Test 4: UNIQUE constraint with RESUMABLE +ALTER TABLE dbo.MyTable +ADD CONSTRAINT uq_test UNIQUE NONCLUSTERED (name) +WITH (RESUMABLE = ON); +``` + +#### Step 4: Configure Test Expectations + +**Test Configuration**: `Test/SqlDom/Only160SyntaxTests.cs` +```csharp +new ParserTest160("AlterTableResumableTests160.sql"), +``` + +#### Step 5: Create Baseline Files + +**Baseline**: `Test/SqlDom/Baselines160/AlterTableResumableTests160.sql` +```sql +ALTER TABLE dbo.MyTable + ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) WITH (RESUMABLE = ON, MAX_DURATION = 240 MINUTES); + +ALTER TABLE dbo.MyTable + ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) WITH (RESUMABLE = ON); + +ALTER TABLE dbo.MyTable + ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) WITH (RESUMABLE = OFF); + +ALTER TABLE dbo.MyTable + ADD CONSTRAINT uq_test UNIQUE NONCLUSTERED (name) WITH (RESUMABLE = ON); +``` + +#### Step 6: Validate the Fix + +```bash +# Build to ensure code compiles +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug + +# Run specific test +dotnet test --filter "FullyQualifiedName~AlterTableResumableTests" -c Debug + +# Run FULL test suite to catch regressions +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug +``` + +**Expected Results**: +- βœ… TSql160Parser: 0 errors (all tests pass) +- βœ… TSql80-150Parsers: 4 errors each (RESUMABLE correctly rejected) +- βœ… Full suite: 1,116 tests passed, 0 failed + +## Common Validation Patterns + +### Pattern 1: Version-Gated Validation +```csharp +// Allow feature only in specific SQL Server versions +if ((versionFlags & SqlVersionFlags.TSqlXXXAndAbove) == 0 && + condition) +{ + ThrowParseErrorException(...); +} +``` + +### Pattern 2: Multiple Version Requirements +```csharp +// Different features with different version requirements +if (((versionFlags & SqlVersionFlags.TSql160AndAbove) == 0 && feature1) || + 
((versionFlags & SqlVersionFlags.TSql120AndAbove) == 0 && feature2)) +{ + ThrowParseErrorException(...); +} +``` + +### Pattern 3: Context-Specific Validation +```csharp +// Same option, different rules for different statements +switch (statement) +{ + case IndexAffectingStatement.AlterTableAddElement: + // Stricter rules for ALTER TABLE + break; + case IndexAffectingStatement.CreateIndex: + // More permissive for CREATE INDEX + break; +} +``` + +## Key Files for Validation Fixes + +### 1. Validation Logic +- **`SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs`** + - Base validation shared by all parser versions + - Contains `VerifyAllowedIndexOption()`, `VerifyAllowedIndexType()`, etc. + - Most validation fixes happen here + +### 2. Version-Specific Overrides +- **`SqlScriptDom/Parser/TSql/TSql160ParserBaseInternal.cs`** + - Can override base validation for specific versions + - Example: `VerifyAllowedIndexOption160()` calls base then adds version-specific logic + +### 3. Option Registration +- **`SqlScriptDom/ScriptDom/SqlServer/IndexOptionHelper.cs`** + - Maps option keywords to `IndexOptionKind` enum values + - Defines version support: `AddOptionMapping(kind, keyword, versionFlags)` + - **Note**: Registration here controls grammar acceptance, validation happens separately + +### 4. Enums and Constants +- **`SqlScriptDom/ScriptDom/SqlServer/IndexAffectingStatement.cs`** + - Defines statement types: `CreateIndex`, `AlterIndex`, `AlterTableAddElement`, etc. + +- **`SqlScriptDom/ScriptDom/SqlServer/IndexOptionKind.cs`** + - Defines option types: `Resumable`, `MaxDuration`, `Online`, etc. 
+ +## Debugging Workflow + +### Step 1: Reproduce the Error +```bash +# Create a minimal test file +echo "ALTER TABLE t ADD CONSTRAINT pk PRIMARY KEY (id) WITH (RESUMABLE = ON);" > test.sql + +# Try parsing it (will fail) +# Use your test harness or create a simple parser test +``` + +### Step 2: Find the Error Source +```bash +# Search for error code +grep -r "SQL46057" SqlScriptDom/ + +# Search for error message text +grep -r "is not a valid" SqlScriptDom/ +``` + +### Step 3: Locate Validation Function +Common validation functions to check: +- `VerifyAllowedIndexOption()` - Most common +- `VerifyAllowedIndexType()` +- `VerifyFeatureSupport()` +- `CheckFeatureAvailability()` + +### Step 4: Examine the Logic +Look for: +- Hardcoded rejections (unconditional throws) +- Version checks that are too strict +- Missing version flag checks +- Incorrect version constants + +### Step 5: Check Similar Working Cases +If ALTER INDEX works but ALTER TABLE doesn't: +- Compare their validation paths +- Check for different `switch` cases +- Look for statement-type specific logic + +## Testing Strategy + +### Test Coverage Checklist +- [ ] Test with option enabled (`OPTION = ON`) +- [ ] Test with option disabled (`OPTION = OFF`) +- [ ] Test with option + other options (`OPTION = ON, OTHER_OPTION = value`) +- [ ] Test different statement types (PRIMARY KEY, UNIQUE, etc.) 
+- [ ] Test across all SQL Server versions (verify error counts) + +### Version-Specific Error Expectations +```csharp +// Pattern for test configuration +new ParserTestXXX("TestFile.sql", + nErrors80: X, // Count errors for SQL 2000 + nErrors90: X, // Count errors for SQL 2005 + nErrors100: X, // Count errors for SQL 2008 + nErrors110: X, // Count errors for SQL 2012 + nErrors120: Y, // May differ if feature added in 2014 + nErrors130: Y, // Same as above + nErrors140: Y, // Same as above + nErrors150: Y, // Same as above + // nErrors160: 0 (implicit) - Feature supported in 2022+ +) +``` + +## Common Pitfalls + +### 1. Assuming Same Version for Related Features +❌ **Wrong**: "RESUMABLE and MAX_DURATION are both resumable features, so both need TSql160+" +✅ **Correct**: Check documentation - MAX_DURATION existed before RESUMABLE (TSql120 vs TSql160) + +### 2. Not Running Full Test Suite +❌ **Wrong**: Only run the new test, assume it's fine +✅ **Correct**: Run ALL tests - validation changes can affect unexpected areas + +### 3. Incorrect Version Flag Logic +❌ **Wrong**: `if (versionFlags & SqlVersionFlags.TSql160AndAbove)` (missing == 0) +✅ **Correct**: `if ((versionFlags & SqlVersionFlags.TSql160AndAbove) == 0)` (check if NOT set) + +### 4. 
Forgetting Statement Context +❌ **Wrong**: Apply same validation to all statement types +βœ… **Correct**: Different statements may have different option support + +## Summary Checklist + +- [ ] **Identify** the validation function throwing the error +- [ ] **Verify** Microsoft documentation for exact version support +- [ ] **Apply** version-gated validation (not unconditional rejection) +- [ ] **Create** comprehensive test cases covering all scenarios +- [ ] **Configure** test expectations for all SQL Server versions +- [ ] **Generate** baseline files from actual parser output +- [ ] **Build** the ScriptDOM project successfully +- [ ] **Run** full test suite (ALL 1,100+ tests must pass) +- [ ] **Document** the fix with clear before/after examples + +## Related Guides + +- [bug_fixing.guidelines.instructions.md](bug_fixing.guidelines.instructions.md) - For grammar-level fixes +- [grammer.guidelines.instructions.md](grammer.guidelines.instructions.md) - For extending existing grammar +- [parser.guidelines.instructions.md](parser.guidelines.instructions.md) - For parentheses recognition issues + +## Real-World Examples + +### Example 1: ALTER TABLE RESUMABLE (SQL Server 2022) +- **File**: `TSql80ParserBaseInternal.cs` +- **Function**: `VerifyAllowedIndexOption()` +- **Fix**: Added `TSql160AndAbove` check for RESUMABLE +- **Tests**: `AlterTableResumableTests160.sql` + +### Example 2: MAX_DURATION (SQL Server 2014) +- **File**: Same as above +- **Function**: Same as above +- **Fix**: Added `TSql120AndAbove` check for MAX_DURATION +- **Tests**: Same file, different version expectations + +These examples demonstrate how validation fixes are often simpler than grammar changes - the parser already knows how to parse the syntax, it just needs permission to accept it in specific contexts and versions. 
diff --git a/.github/instructions/grammer.guidelines.instructions.md b/.github/instructions/grammer.guidelines.instructions.md new file mode 100644 index 0000000..53311f7 --- /dev/null +++ b/.github/instructions/grammer.guidelines.instructions.md @@ -0,0 +1,271 @@ +# Grammar Extension Patterns for SqlScriptDOM + +This guide documents common patterns for extending the SqlScriptDOM parser grammar to support new syntax or enhance existing functionality. + +## Pattern 1: Extending Literals to Expressions + +### When to Use +When existing grammar rules only accept literal values but need to support dynamic expressions like parameters, variables, or computed values. + +### Example Problem +Functions or constructs that currently accept only: +- `IntegerLiteral` (e.g., `TOP_N = 10`) +- `StringLiteral` (e.g., `VALUE = 'literal'`) + +But need to support: +- Parameters: `@parameter` +- Variables: `@variable` +- Column references: `table.column` +- Outer references: `outerref.column` +- Function calls: `FUNCTION(args)` +- Computed expressions: `value + 1` + +### ⚠️ Critical Warning: Avoid Modifying Shared Grammar Rules + +**DO NOT** modify existing shared grammar rules like `identifierColumnReferenceExpression` that are used throughout the codebase. This can cause unintended side effects and break other functionality. + +**Instead**, create specialized rules for your specific context. 
+ +### Solution Template + +#### Step 1: Update AST Definition (`Ast.xml`) +```xml + + + + + +``` + +#### Step 2: Create Context-Specific Grammar Rule (`TSql*.g`) +```antlr +// Create a specialized rule for your context +yourContextColumnReferenceExpression returns [ColumnReferenceExpression vResult = this.FragmentFactory.CreateFragment()] +{ + MultiPartIdentifier vMultiPartIdentifier; +} + : + vMultiPartIdentifier=multiPartIdentifier[2] // Allows table.column syntax + { + vResult.ColumnType = ColumnType.Regular; + vResult.MultiPartIdentifier = vMultiPartIdentifier; + } + ; + +// Use the specialized rule in your custom grammar +yourContextParameterRule returns [ScalarExpression vResult] + : vResult=signedInteger + | vResult=variable + | vResult=yourContextColumnReferenceExpression // Context-specific rule + ; +``` + +#### Step 3: Verify Script Generator +Most script generators using `GenerateNameEqualsValue()` or similar methods work automatically with `ScalarExpression`. No changes typically needed. 
 + +#### Step 4: Add Test Coverage +Add tests within the existing test framework: +```csharp +[TestMethod] +public void VerifyGrammarExtension() +{ + var parser = new TSql170Parser(true); + + // Test parameter + var sql1 = "SELECT FUNCTION_NAME(PARAM = @parameter)"; + var result1 = parser.Parse(new StringReader(sql1), out var errors1); + Assert.AreEqual(0, errors1.Count, "Parameter syntax should work"); + + // Test outer reference + var sql2 = "SELECT FUNCTION_NAME(PARAM = outerref.column)"; + var result2 = parser.Parse(new StringReader(sql2), out var errors2); + Assert.AreEqual(0, errors2.Count, "Outer reference syntax should work"); + + // Test computed expression + var sql3 = "SELECT FUNCTION_NAME(PARAM = value + 1)"; + var result3 = parser.Parse(new StringReader(sql3), out var errors3); + Assert.AreEqual(0, errors3.Count, "Computed expression syntax should work"); +} +``` + +**⚠️ CRITICAL**: Add this test method to an existing test class (e.g., `Only170SyntaxTests.cs`). **Never create standalone test projects.** + +### Real-World Example: VECTOR_SEARCH TOP_N + +**Problem**: `VECTOR_SEARCH` TOP_N parameter only accepted integer literals. + +**❌ Wrong Approach**: Modify `identifierColumnReferenceExpression` to use `multiPartIdentifier[2]` +- **Result**: Broke `CreateIndexStatementErrorTest` because other grammar rules started accepting invalid syntax + +**✅ Correct Approach**: Create `vectorSearchColumnReferenceExpression` specialized for VECTOR_SEARCH +- **Result**: VECTOR_SEARCH supports multi-part identifiers without affecting other functionality + +**Final Implementation**: +```antlr +signedIntegerOrVariableOrColumnReference returns [ScalarExpression vResult] + : vResult=signedInteger + | vResult=variable + | vResult=vectorSearchColumnReferenceExpression // VECTOR_SEARCH-specific rule + ; + +vectorSearchColumnReferenceExpression returns [ColumnReferenceExpression vResult = ...] 
+ : + vMultiPartIdentifier=multiPartIdentifier[2] // Allows table.column syntax + { + vResult.ColumnType = ColumnType.Regular; + vResult.MultiPartIdentifier = vMultiPartIdentifier; + } + ; +``` + +**Result**: Now supports dynamic TOP_N values: +```sql +-- Parameters +VECTOR_SEARCH(..., TOP_N = @k) AS ann + +-- Outer references +VECTOR_SEARCH(..., TOP_N = outerref.max_results) AS ann +``` + +## Pattern 2: Adding New Enum Members + +### When to Use +When adding new operators, keywords, or options to existing constructs. + +### Solution Template + +#### Step 1: Update Enum in AST (`Ast.xml`) +```xml + + + + + +``` + +#### Step 2: Update Grammar Rule (`TSql*.g`) +```antlr +// Add new token matching +| tNewValue:Identifier +{ + Match(tNewValue, CodeGenerationSupporter.NewValue); + vResult.EnumProperty = ExistingEnumType.NewValue; +} +``` + +#### Step 3: Update Script Generator +```csharp +// Add mapping in appropriate generator file +private static readonly Dictionary _enumGenerators = + new Dictionary() +{ + { EnumType.ExistingValue1, CodeGenerationSupporter.ExistingValue1 }, + { EnumType.ExistingValue2, CodeGenerationSupporter.ExistingValue2 }, + { EnumType.NewValue, CodeGenerationSupporter.NewValue }, // Add this +}; +``` + +## Pattern 3: Adding New Function or Statement + +### When to Use +When adding completely new T-SQL functions or statements. 
+ +### Solution Template + +#### Step 1: Define AST Node (`Ast.xml`) +```xml + + + + +``` + +#### Step 2: Add Grammar Rule (`TSql*.g`) +```antlr +newFunctionCall returns [NewFunctionCall vResult = FragmentFactory.CreateFragment()] +{ + ScalarExpression vParam1; + StringLiteral vParam2; +} + : + tFunction:Identifier LeftParenthesis + { + Match(tFunction, CodeGenerationSupporter.NewFunction); + UpdateTokenInfo(vResult, tFunction); + } + vParam1 = expression + { + vResult.Parameter1 = vParam1; + } + Comma vParam2 = stringLiteral + { + vResult.Parameter2 = vParam2; + } + RightParenthesis + ; +``` + +#### Step 3: Integrate with Existing Rules +Add the new rule to appropriate places in the grammar (e.g., `functionCall`, `primaryExpression`, etc.). + +#### Step 4: Create Script Generator +```csharp +public override void ExplicitVisit(NewFunctionCall node) +{ + GenerateIdentifier(CodeGenerationSupporter.NewFunction); + GenerateSymbol(TSqlTokenType.LeftParenthesis); + GenerateFragmentIfNotNull(node.Parameter1); + GenerateSymbol(TSqlTokenType.Comma); + GenerateFragmentIfNotNull(node.Parameter2); + GenerateSymbol(TSqlTokenType.RightParenthesis); +} +``` + +## Best Practices + +### 1. Backward Compatibility +- Always ensure existing syntax continues to work +- Extend rather than replace existing rules +- Test both old and new syntax + +### 2. Testing Strategy +- Add comprehensive test cases in `TestScripts/` +- Update baseline files with expected output +- Test edge cases and error conditions + +### 3. Documentation +- Update grammar comments with new syntax +- Add examples in code comments +- Document any limitations or requirements + +### 4. Version Targeting +- Add new features to the appropriate SQL Server version grammar +- Consider whether feature should be backported to earlier versions +- Update all relevant grammar files if syntax is version-independent + +## Common Pitfalls + +### 1. 
Forgetting Script Generator Updates +- Grammar changes often require corresponding script generator changes +- Test the round-trip: parse β†’ generate β†’ parse again + +### 2. Incomplete Test Coverage +- Test all supported expression types when extending to `ScalarExpression` +- Include error cases and boundary conditions + +### 3. Missing Version Updates +- New syntax should be added to all relevant grammar versions +- Consider SQL Server version compatibility + +### 4. AST Design Issues +- Choose appropriate base classes for new AST nodes +- Consider reusing existing AST patterns where possible +- Ensure proper inheritance hierarchy + +## Reference Examples + +- **VECTOR_SEARCH TOP_N Extension**: Literal to expression pattern +- **REGEXP_LIKE Predicate**: Boolean parentheses recognition pattern +- **EVENT SESSION Predicates**: Function-style vs operator-style predicates + +For detailed step-by-step examples, see [BUG_FIXING_GUIDE.md](BUG_FIXING_GUIDE.md). \ No newline at end of file diff --git a/.github/instructions/new_data_types.guidelines.instructions.md b/.github/instructions/new_data_types.guidelines.instructions.md new file mode 100644 index 0000000..86e59a6 --- /dev/null +++ b/.github/instructions/new_data_types.guidelines.instructions.md @@ -0,0 +1,439 @@ +# Guidelines for Adding New Data Types to SqlScriptDOM + +This guide provides step-by-step instructions for adding support for completely new SQL Server data types to the SqlScriptDOM parser. This pattern was established from the Vector data type implementation (commits 38a0971 and cd69b78). 
 + +## When to Use This Guide + +Use this pattern when: +- ✅ Adding a **completely new SQL Server data type** (e.g., VECTOR, GEOMETRY, GEOGRAPHY) +- ✅ The data type has **custom parameters** not handled by standard SQL data types +- ✅ The data type requires **specialized parsing logic** beyond simple name/size parameters +- ✅ The data type is **introduced in a specific SQL Server version** + +**Do NOT use this guide for:** +- ❌ Modifying existing data types (use [validation_fix.guidelines.instructions.md](validation_fix.guidelines.instructions.md)) +- ❌ Adding function syntax (use [function.guidelines.instructions.md](function.guidelines.instructions.md)) +- ❌ Simple keyword additions (use [bug_fixing.guidelines.instructions.md](bug_fixing.guidelines.instructions.md)) + +## Real-World Example: Vector Data Type + +The Vector data type implementation demonstrates this pattern: + +### SQL Server Syntax Supported +```sql +-- Basic vector with dimension only +DECLARE @embedding AS VECTOR(1536); +CREATE TABLE tbl (embedding VECTOR(1536)); + +-- Vector with dimension and base type +DECLARE @embedding AS VECTOR(1536, FLOAT32); +CREATE TABLE tbl (embedding VECTOR(1536, FLOAT16)); +``` + +### Key Challenge Solved +The Vector type requires custom parsing because: +- **Standard data types** use size parameters: `VARCHAR(50)`, `DECIMAL(10,2)` +- **Vector type** uses dimension + optional base type: `VECTOR(1536, FLOAT32)` +- **Base type parameter** is an identifier (FLOAT16/FLOAT32), not a size literal + +## Step-by-Step Implementation Guide + +### 1. 
Define AST Node Structure (`Ast.xml`) + +Add a new class inheriting from `DataTypeReference`: + +```xml + + + + + + + + + +``` + +**Design Principles**: +- **Inherit from `DataTypeReference`**: All SQL data types inherit from this base class +- **Choose appropriate member types**: + - `IntegerLiteral`: For numeric parameters (dimensions, sizes) + - `Identifier`: For type names or keywords + - `StringLiteral`: For string parameters + - `ScalarExpression`: For complex expressions (use sparingly) +- **Optional parameters**: Members can be null for optional syntax + +### 2. Add Grammar Rule (`TSql*.g`) + +Create a specialized parsing rule for your data type: + +```antlr +// Location: SqlScriptDom/Parser/TSql/TSql170.g (or appropriate version) +// Add after xmlDataType rule (~line 30672) + +yourDataType [SchemaObjectName vName] returns [YourDataTypeReference vResult = FragmentFactory.CreateFragment()] +{ + vResult.Name = vName; + vResult.UpdateTokenInfo(vName); + + IntegerLiteral vParameter1 = null; + Identifier vParameter2 = null; +} + : + ( LeftParenthesis vParameter1=integer + { + vResult.Parameter1 = vParameter1; + } + ( + Comma vParameter2=identifier + { + vResult.Parameter2 = vParameter2; + } + )? + tRParen:RightParenthesis + { + UpdateTokenInfo(vResult,tRParen); + } + ) + ; +``` + +**Grammar Pattern Explanation**: +- **Function signature**: Takes `SchemaObjectName vName` parameter and returns your AST type +- **Variable declarations**: Declare variables for each parameter using appropriate types +- **Parameter parsing**: Use `integer`, `identifier`, `stringLiteral` based on syntax needs +- **Optional parameters**: Wrap in `( ... )?` syntax for optional elements +- **Token info updates**: Always call `UpdateTokenInfo()` for proper source location tracking + +### 3. 
Integrate with Scalar Data Type Rule + +Connect your new grammar rule to the main data type parsing logic: + +```antlr +// Location: SqlScriptDom/Parser/TSql/TSql170.g +// Find scalarDataType rule (~line 30694) and add your type check + +scalarDataType returns [DataTypeReference vResult = null] +{ + SchemaObjectName vName; + SqlDataTypeOption typeOption = SqlDataTypeOption.None; + // ... existing variables ... +} + : vName = schemaObjectFourPartName + { + typeOption = GetSqlDataTypeOption(vName); + // ... existing logic ... + } + ( + ( + {isXmlDataType}? + vResult = xmlDataType[vName] + | + {typeOption == SqlDataTypeOption.YourType}? // Add this condition + vResult = yourDataType[vName] + | + {typeOption != SqlDataTypeOption.None}? + vResult = sqlDataTypeWithoutNational[vName, typeOption] + // ... rest of existing alternatives +``` + +**Integration Requirements**: +- **Add type option check**: Use `{typeOption == SqlDataTypeOption.YourType}?` semantic predicate +- **Maintain order**: Place before the generic `sqlDataTypeWithoutNational` fallback +- **Update SqlDataTypeOption enum**: Add your type to the enum (implementation dependent) + +### 4. Add String Constants + +Add necessary string constants for keywords: + +```csharp +// Location: SqlScriptDom/Parser/TSql/CodeGenerationSupporter.cs +// Add alphabetically in the constants section (~line 427 for Float constants) + +internal const string YourType = "YOURTYPE"; +internal const string YourTypeParam1 = "PARAM1_KEYWORD"; +internal const string YourTypeParam2 = "PARAM2_KEYWORD"; +``` + +**Naming Convention**: +- **Use exact SQL keyword casing**: `VECTOR`, `FLOAT16`, `FLOAT32` +- **Group related constants**: Keep data type constants together +- **Alphabetical ordering**: Maintain alphabetical order within sections + +### 5. 
Create Script Generator + +Implement the visitor method to convert AST back to T-SQL: + +```csharp +// Location: SqlScriptDom/ScriptDom/SqlServer/ScriptGenerator/SqlScriptGeneratorVisitor.YourDataType.cs +// Create new file following naming convention + +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// +//------------------------------------------------------------------------------ + +namespace Microsoft.SqlServer.TransactSql.ScriptDom.ScriptGenerator +{ + partial class SqlScriptGeneratorVisitor + { + public override void ExplicitVisit(YourDataTypeReference node) + { + GenerateIdentifier(CodeGenerationSupporter.YourType); + GenerateSymbol(TSqlTokenType.LeftParenthesis); + GenerateFragmentIfNotNull(node.Parameter1); + if (node.Parameter2 != null) + { + GenerateSymbol(TSqlTokenType.Comma); + GenerateSpaceAndFragmentIfNotNull(node.Parameter2); + } + GenerateSymbol(TSqlTokenType.RightParenthesis); + } + } +} +``` + +**Script Generation Patterns**: +- **Use `GenerateIdentifier()`**: For type names and keywords +- **Use `GenerateSymbol()`**: For punctuation (`LeftParenthesis`, `Comma`, etc.) +- **Use `GenerateFragmentIfNotNull()`**: For required parameters +- **Use `GenerateSpaceAndFragmentIfNotNull()`**: For optional parameters with preceding space +- **Handle optional parameters**: Always check for null before generating + +### 6. Build and Test Grammar Changes + +Build the project to regenerate parser files: + +```bash +# Build the ScriptDOM project to regenerate parser from grammar +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug +``` + +**Common Build Issues**: +- **Grammar syntax errors**: Check ANTLR syntax in `.g` files +- **Missing constants**: Ensure all referenced constants exist in `CodeGenerationSupporter.cs` +- **AST node mismatches**: Verify AST class names match grammar return types + +### 7. 
Create Comprehensive Test Scripts + +Create test script covering all syntax variations: + +```sql +-- File: Test/SqlDom/TestScripts/YourDataTypeTests170.sql + +-- Basic syntax with single parameter +CREATE TABLE tbl (col1 YOURTYPE(100)); +DECLARE @var1 AS YOURTYPE(100); + +-- Extended syntax with optional parameters +CREATE TABLE tbl (col1 YOURTYPE(100, PARAM1)); +DECLARE @var2 AS YOURTYPE(100, PARAM2); + +-- Case insensitivity testing +CREATE TABLE tbl (col1 yourtype(100, param1)); +DECLARE @var3 AS YOURTYPE(100, param2); + +-- Integration with other SQL constructs +CREATE TABLE tbl ( + id INT PRIMARY KEY, + data YOURTYPE(100, PARAM1) NOT NULL +); + +-- Variables and parameters +CREATE FUNCTION TestFunction(@input YOURTYPE(100)) +RETURNS YOURTYPE(200, PARAM2) +AS +BEGIN + DECLARE @result YOURTYPE(200, PARAM2); + RETURN @result; +END; +``` + +**Test Coverage Requirements**: +- **All syntax variations**: Test with and without optional parameters +- **Case sensitivity**: Test different case combinations +- **Integration contexts**: Variables, table columns, function parameters/returns +- **Edge cases**: Minimum/maximum parameter values if applicable + +### 8. Generate Baseline Files + +Create the expected output baseline: + +1. **Create placeholder baseline**: `Test/SqlDom/Baselines170/YourDataTypeTests170.sql` +2. **Run the test** (will fail initially): + ```bash + dotnet test --filter "YourDataTypeTests170" Test/SqlDom/UTSqlScriptDom.csproj -c Debug + ``` +3. **Copy "Actual" output** from test failure into baseline file +4. **Verify formatting** matches parser's standard formatting + +**Baseline Example** (Vector data type): +```sql +CREATE TABLE tbl ( + embedding VECTOR(1) +); + +CREATE TABLE tbl ( + embedding VECTOR(1, float32) +); + +DECLARE @embedding AS VECTOR(2); + +DECLARE @embedding AS VECTOR(2, FLOAT32); +``` + +### 9. 
Configure Test Expectations + +Add test configuration to version-specific test class: + +```csharp +// Location: Test/SqlDom/Only170SyntaxTests.cs (or appropriate version) +// Add to the ParserTest170 array + +new ParserTest170("YourDataTypeTests170.sql"), +``` + +**Error Count Guidelines**: +- **Count all syntax instances**: Each usage of the new type in test script +- **Consider SQL version support**: When was the feature actually introduced? +- **Consistent across versions**: Usually same error count until supported version +- **Test validation**: Run tests to verify error counts are accurate + +### 10. Full Test Suite Validation + +Run complete test suite to ensure no regressions: + +```bash +# Run all ScriptDOM tests +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug + +# Expected result: All tests pass, including new data type tests +# Total tests: 1,100+ (number increases with new features) +``` + +**Regression Prevention**: +- **Grammar changes can break existing functionality**: Shared rules affect multiple contexts +- **AST changes can break script generation**: Ensure all visitors are updated +- **Version compatibility**: New syntax shouldn't break older version parsers + +## Advanced Considerations + +### Version-Specific Implementation + +For data types introduced in specific SQL Server versions: + +```antlr +// Different grammar files for different SQL versions +// TSql160.g - SQL Server 2022 features +// TSql170.g - SQL Server 2025 features +// TSqlFabricDW.g - Azure Synapse features +``` + +**Guidelines**: +- **Target appropriate version**: Add to the SQL version where feature was introduced +- **Cascade to later versions**: Copy rules to all subsequent version grammar files +- **Version-specific testing**: Test error behavior in older parsers + +### Complex Parameter Types + +For data types requiring complex parameter parsing: + +```xml + + + +``` + +**When to use complex types**: +- **ScalarExpression**: When parameters can be variables, 
function calls, or computed values +- **Collections**: When syntax supports multiple values or options +- **Custom classes**: When parameters have their own sub-syntax + +### Script Generator Considerations + +For complex formatting requirements: + +```csharp +public override void ExplicitVisit(ComplexDataTypeReference node) +{ + GenerateIdentifier(CodeGenerationSupporter.ComplexType); + GenerateSymbol(TSqlTokenType.LeftParenthesis); + + // Complex formatting with line breaks + if (node.HasMultipleParameters) + { + Indent(); + GenerateNewLine(); + } + + GenerateCommaSeparatedList(node.Parameters); + + if (node.HasMultipleParameters) + { + Outdent(); + GenerateNewLine(); + } + + GenerateSymbol(TSqlTokenType.RightParenthesis); +} +``` + +## Common Pitfalls and Solutions + +### 1. Forgetting Script Generator Implementation +**Problem**: AST node created but no script generation visitor +**Solution**: Always implement `ExplicitVisit()` method for new AST nodes + +### 2. Incorrect Grammar Integration +**Problem**: Data type not recognized in all contexts +**Solution**: Ensure integration with `scalarDataType` rule and proper semantic predicates + +### 3. Missing Version Compatibility +**Problem**: New type breaks older version parsers unexpectedly +**Solution**: Add proper version checks and test all SQL Server versions + +### 4. Incomplete Test Coverage +**Problem**: Edge cases not covered in testing +**Solution**: Test all syntax variations, case sensitivity, and integration contexts + +### 5. 
AST Design Issues +**Problem**: AST doesn't properly represent the SQL syntax +**Solution**: Design AST members to match SQL parameter structure and optionality + +## Validation Checklist + +- [ ] **AST Definition**: New class inherits from correct base class with appropriate members +- [ ] **Grammar Rules**: Specialized parsing rule handles all syntax variations +- [ ] **Grammar Integration**: Connected to `scalarDataType` with proper semantic predicate +- [ ] **String Constants**: All keywords added to `CodeGenerationSupporter.cs` +- [ ] **Script Generator**: `ExplicitVisit()` method generates correct T-SQL output +- [ ] **Test Scripts**: Comprehensive test coverage including edge cases +- [ ] **Baseline Files**: Generated output matches expected formatted T-SQL +- [ ] **Test Configuration**: Error counts configured for all SQL Server versions +- [ ] **Build Success**: Project builds without errors and regenerates parser +- [ ] **Full Test Suite**: All existing tests continue to pass (no regressions) + +## Related Guides + +- [bug_fixing.guidelines.instructions.md](bug_fixing.guidelines.instructions.md) - For general grammar modifications +- [function.guidelines.instructions.md](function.guidelines.instructions.md) - For adding system functions +- [validation_fix.guidelines.instructions.md](validation_fix.guidelines.instructions.md) - For validation-only issues +- [testing.guidelines.instructions.md](testing.guidelines.instructions.md) - For comprehensive testing strategies + +## Real-World Examples + +### Vector Data Type (SQL Server 2025) +- **AST Class**: `VectorDataTypeReference` with `Dimension` and `BaseType` members +- **Syntax**: `VECTOR(1536)`, `VECTOR(1536, FLOAT32)` +- **Commits**: cd69b78, 38a0971 +- **Challenge**: Optional second parameter with identifier type + +### Future Examples +This pattern can be applied to other SQL Server data types like: +- **GEOMETRY**: Spatial data with complex parameters +- **GEOGRAPHY**: Geographic data with coordinate 
systems +- **HIERARCHYID**: Hierarchical data with custom syntax +- **Custom CLR Types**: User-defined types with specialized parameters + +The Vector implementation serves as the canonical example for this pattern and should be referenced for similar future data type additions. \ No newline at end of file diff --git a/.github/instructions/new_index_types.guidelines.instructions.md b/.github/instructions/new_index_types.guidelines.instructions.md new file mode 100644 index 0000000..dacba71 --- /dev/null +++ b/.github/instructions/new_index_types.guidelines.instructions.md @@ -0,0 +1,546 @@ +# Guidelines for Adding New Index Types to SqlScriptDOM + +This guide provides step-by-step instructions for adding support for new SQL Server index types to the SqlScriptDOM parser. This pattern was established from the JSON and Vector index implementations found in SQL Server 2025 (TSql170). + +## When to Use This Guide + +Use this pattern when: +- βœ… Adding a **completely new SQL Server index type** (e.g., JSON INDEX, VECTOR INDEX, SPATIAL INDEX) +- βœ… The index type has **specialized syntax** not handled by standard CREATE INDEX +- βœ… The index type requires **custom parsing logic** for type-specific clauses or options +- βœ… The index type is **introduced in a specific SQL Server version** + +**Do NOT use this guide for:** +- ❌ Adding new index options to existing index types (use [validation_fix.guidelines.instructions.md](validation_fix.guidelines.instructions.md)) +- ❌ Adding standard indexes with new keywords (use [bug_fixing.guidelines.instructions.md](bug_fixing.guidelines.instructions.md)) +- ❌ Adding function or data type syntax (use respective guides) + +## Real-World Examples: JSON and Vector Indexes + +### JSON Index Implementation +```sql +-- Basic JSON index +CREATE JSON INDEX IX_JSON_Basic ON dbo.Users (JsonData); + +-- JSON index with FOR clause (multiple paths) +CREATE JSON INDEX IX_JSON_Paths ON dbo.Users (JsonData) +FOR ('$.name', '$.email', '$.age'); + 
+-- JSON index with WITH options +CREATE JSON INDEX IX_JSON_Options ON dbo.Users (JsonData) +WITH (OPTIMIZE_FOR_ARRAY_SEARCH = ON, MAXDOP = 4); +``` + +### Vector Index Implementation +```sql +-- Basic vector index +CREATE VECTOR INDEX IX_Vector_Basic ON dbo.Documents (VectorData); + +-- Vector index with metric and type +CREATE VECTOR INDEX IX_Vector_Complete ON dbo.Documents (VectorData) +WITH (METRIC = 'cosine', TYPE = 'DiskANN'); + +-- Vector index with filegroup +CREATE VECTOR INDEX IX_Vector_FG ON dbo.Documents (VectorData) +WITH (METRIC = 'dot') +ON [PRIMARY]; +``` + +### Key Challenges Solved +- **Type-specific syntax**: JSON INDEX has `FOR (paths)` clause, VECTOR INDEX has `METRIC`/`TYPE` options +- **Custom columns**: Single column specification instead of column lists +- **Specialized options**: New index options specific to each index type +- **Grammar integration**: Seamless integration with existing CREATE INDEX patterns + +## Step-by-Step Implementation Guide + +### 1. Define AST Node Structure (`Ast.xml`) + +Add a new class inheriting from `IndexStatement`: + +```xml + + + + + + + + + + + +``` + +**Design Principles**: +- **Inherit from `IndexStatement`**: All index types inherit from this base class +- **Reuse standard properties**: `Name`, `OnName`, `IndexOptions` come from base class +- **Add type-specific members**: Properties unique to your index type +- **Collections for lists**: Use `Collection="true"` for array-like properties +- **Optional members**: Members can be null for optional syntax elements + +### 2. Add Grammar Rule (`TSql*.g`) + +Create a specialized parsing rule for your index type: + +```antlr +// Location: SqlScriptDom/Parser/TSql/TSql170.g (or appropriate version) +// Add after existing index statement rules (~line 17021) + +createYourTypeIndexStatement [IToken tUnique, bool? 
isClustered] returns [CreateYourTypeIndexStatement vResult = FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; + SchemaObjectName vSchemaObjectName; + Identifier vSpecializedColumn; + StringLiteral vProperty; + FileGroupOrPartitionScheme vFileGroupOrPartitionScheme; + + if (tUnique != null) + { + ThrowIncorrectSyntaxErrorException(tUnique); + } + if (isClustered.HasValue) + { + ThrowIncorrectSyntaxErrorException(LT(1)); + } +} + : tYourType:Identifier tIndex:Index vIdentifier=identifier + { + Match(tYourType, CodeGenerationSupporter.YourType); + vResult.Name = vIdentifier; + } + tOn:On vSchemaObjectName=schemaObjectThreePartName + { + vResult.OnName = vSchemaObjectName; + } + LeftParenthesis vSpecializedColumn=identifier tRParen:RightParenthesis + { + vResult.SpecializedColumn = vSpecializedColumn; + UpdateTokenInfo(vResult, tRParen); + } + ( + tFor:For LeftParenthesis + vProperty=stringLiteral + { + AddAndUpdateTokenInfo(vResult, vResult.TypeSpecificProperty, vProperty); + } + ( + Comma vProperty=stringLiteral + { + AddAndUpdateTokenInfo(vResult, vResult.TypeSpecificProperty, vProperty); + } + )* + RightParenthesis + )? + ( + // Greedy due to conflict with withCommonTableExpressionsAndXmlNamespaces + options {greedy = true; } : + With + indexOptionList[IndexAffectingStatement.CreateIndex, vResult.IndexOptions, vResult] + )? + ( + On vFileGroupOrPartitionScheme=filegroupOrPartitionScheme + { + vResult.OnFileGroupOrPartitionScheme = vFileGroupOrPartitionScheme; + } + )? + ; +``` + +**Grammar Pattern Explanation**: +- **Parameter validation**: Reject UNIQUE and CLUSTERED if not supported +- **Standard index structure**: Name, ON table, column specification +- **Type-specific clauses**: Optional FOR, WITH, ON clauses as appropriate +- **Token matching**: Use `Match()` to verify keywords +- **Collection building**: Use `AddAndUpdateTokenInfo()` for lists + +### 3. 
Integrate with Main Index Grammar + +Add your index type to the main CREATE INDEX rule: + +```antlr +// Location: SqlScriptDom/Parser/TSql/TSql170.g +// Find createIndexStatement rule (~line 16880) and add integration + +createIndexStatement [IToken tUnique, bool? isClustered] returns [TSqlStatement vResult] + : // ... existing alternatives ... + | vResult=createYourTypeIndexStatement[tUnique, isClustered] + ; + +// Also add to ddlStatement if needed (~line 885) +ddlStatement returns [TSqlStatement vResult] + : // ... existing alternatives ... + | vResult=createYourTypeIndexStatement[null, null] + // ... rest of alternatives ... + ; +``` + +**Integration Requirements**: +- **Add to `createIndexStatement`**: Main CREATE INDEX dispatch rule +- **Add to `ddlStatement`**: Top-level DDL statement recognition +- **Parameter passing**: Pass `tUnique` and `isClustered` tokens +- **Consistent ordering**: Place appropriately among other index types + +### 4. Add String Constants + +Add necessary keywords to `CodeGenerationSupporter.cs`: + +```csharp +// Location: SqlScriptDom/Parser/TSql/CodeGenerationSupporter.cs +// Add alphabetically in the constants section + +internal const string YourType = "YOUR_TYPE"; +internal const string YourTypeSpecificKeyword1 = "KEYWORD1"; +internal const string YourTypeSpecificKeyword2 = "KEYWORD2"; +``` + +**Naming Convention**: +- **Use exact SQL keyword casing**: `Json`, `Vector`, `Metric` +- **Group related constants**: Keep index-specific constants together +- **Follow existing patterns**: Match existing naming conventions + +### 5. Add Index Options (if needed) + +If your index type requires new index options, add them: + +```csharp +// Location: SqlScriptDom/ScriptDom/SqlServer/IndexOptionKind.cs +// Add to the enum +public enum IndexOptionKind +{ + // ... existing options ... + YourTypeSpecificOption1, + YourTypeSpecificOption2, + // ... rest of enum ... 
+} + +// Location: SqlScriptDom/Parser/TSql/IndexOptionHelper.cs +// Add option mappings in the constructor +AddOptionMapping(IndexOptionKind.YourTypeSpecificOption1, CodeGenerationSupporter.YourTypeSpecificKeyword1, SqlVersionFlags.TSql170AndAbove); +AddOptionMapping(IndexOptionKind.YourTypeSpecificOption2, CodeGenerationSupporter.YourTypeSpecificKeyword2, SqlVersionFlags.TSql170AndAbove); +``` + +**Option Guidelines**: +- **Add to enum**: Define new `IndexOptionKind` values +- **Register mappings**: Map keywords to enum values with version flags +- **Version compatibility**: Use appropriate `SqlVersionFlags` + +### 6. Create Script Generator + +Implement the visitor method to convert AST back to T-SQL: + +```csharp +// Location: SqlScriptDom/ScriptDom/SqlServer/ScriptGenerator/SqlScriptGeneratorVisitor.CreateYourTypeIndexStatement.cs +// Create new file following naming convention + +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// +//------------------------------------------------------------------------------ +using System.Collections.Generic; +using Microsoft.SqlServer.TransactSql.ScriptDom; + +namespace Microsoft.SqlServer.TransactSql.ScriptDom.ScriptGenerator +{ + partial class SqlScriptGeneratorVisitor + { + public override void ExplicitVisit(CreateYourTypeIndexStatement node) + { + GenerateKeyword(TSqlTokenType.Create); + + GenerateSpaceAndIdentifier(CodeGenerationSupporter.YourType); + + GenerateSpaceAndKeyword(TSqlTokenType.Index); + + // name + GenerateSpaceAndFragmentIfNotNull(node.Name); + + NewLineAndIndent(); + GenerateKeyword(TSqlTokenType.On); + GenerateSpaceAndFragmentIfNotNull(node.OnName); + + // Specialized column + if (node.SpecializedColumn != null) + { + GenerateSpace(); + GenerateSymbol(TSqlTokenType.LeftParenthesis); + GenerateFragmentIfNotNull(node.SpecializedColumn); + GenerateSymbol(TSqlTokenType.RightParenthesis); + } + + // Type-specific clause + if (node.TypeSpecificProperty != null && node.TypeSpecificProperty.Count > 0) + { + NewLineAndIndent(); + GenerateKeyword(TSqlTokenType.For); + GenerateSpace(); + GenerateParenthesisedCommaSeparatedList(node.TypeSpecificProperty); + } + + GenerateIndexOptions(node.IndexOptions); + + if (node.OnFileGroupOrPartitionScheme != null) + { + NewLineAndIndent(); + GenerateKeyword(TSqlTokenType.On); + + GenerateSpaceAndFragmentIfNotNull(node.OnFileGroupOrPartitionScheme); + } + } + } +} +``` + +**Script Generation Patterns**: +- **Use `GenerateKeyword()`**: For T-SQL keywords like CREATE, INDEX, ON +- **Use `GenerateIdentifier()`**: For type-specific keywords +- **Use `NewLineAndIndent()`**: For proper formatting with line breaks +- **Use `GenerateIndexOptions()`**: Reuse existing index option generation +- **Handle collections**: Use `GenerateParenthesisedCommaSeparatedList()` for arrays + +### 7. 
Build and Test Grammar Changes + +Build the project to regenerate parser files: + +```bash +# Build the ScriptDOM project to regenerate parser from grammar +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug +``` + +**Common Build Issues**: +- **Grammar syntax errors**: Check ANTLR syntax in `.g` files +- **Missing constants**: Ensure all referenced constants exist in `CodeGenerationSupporter.cs` +- **AST node mismatches**: Verify AST class names match grammar return types +- **Option registration**: Ensure new index options are properly registered + +### 8. Create Comprehensive Test Scripts + +Create test script covering all syntax variations: + +```sql +-- File: Test/SqlDom/TestScripts/YourTypeIndexTests170.sql + +-- Basic index creation +CREATE YOUR_TYPE INDEX IX_YourType_Basic ON dbo.Table1 (SpecializedColumn); + +-- Index with type-specific clause +CREATE YOUR_TYPE INDEX IX_YourType_WithClause ON dbo.Table1 (SpecializedColumn) +FOR ('value1', 'value2', 'value3'); + +-- Index with WITH options +CREATE YOUR_TYPE INDEX IX_YourType_WithOptions ON dbo.Table1 (SpecializedColumn) +WITH (YOUR_TYPE_OPTION1 = 'value', MAXDOP = 4); + +-- Index with type-specific clause and WITH options +CREATE YOUR_TYPE INDEX IX_YourType_Complete ON dbo.Table1 (SpecializedColumn) +FOR ('property1', 'property2') +WITH (YOUR_TYPE_OPTION1 = 'setting', YOUR_TYPE_OPTION2 = 'config'); + +-- Index on schema-qualified table +CREATE YOUR_TYPE INDEX IX_YourType_Schema ON MySchema.MyTable (Column1) +FOR ('path.value'); + +-- Index with quoted identifiers +CREATE YOUR_TYPE INDEX [IX YourType Index] ON [dbo].[Table1] ([Column Name]) +FOR ('complex.path.expression'); + +-- Index with filegroup +CREATE YOUR_TYPE INDEX IX_YourType_Filegroup ON dbo.Table1 (Column1) +WITH (YOUR_TYPE_OPTION1 = 'setting') +ON [PRIMARY]; + +-- Index with complex options +CREATE YOUR_TYPE INDEX IX_YourType_AllOptions ON dbo.Table1 (Column1) +FOR ('value1', 'value2') +WITH (YOUR_TYPE_OPTION1 = 
'config', YOUR_TYPE_OPTION2 = 'setting', MAXDOP = 8, ONLINE = OFF); +``` + +**Test Coverage Requirements**: +- **All syntax variations**: Basic, with clauses, with options, combinations +- **Schema qualification**: Different schema and table names +- **Quoted identifiers**: Test case sensitivity and special characters +- **Integration contexts**: Filegroups, partition schemes, standard index options +- **Edge cases**: Empty clauses, maximum option combinations + +### 9. Generate Baseline Files + +Create the expected output baseline: + +1. **Create placeholder baseline**: `Test/SqlDom/Baselines170/YourTypeIndexTests170.sql` +2. **Run the test** (will fail initially): + ```bash + dotnet test --filter "YourTypeIndexTests170" Test/SqlDom/UTSqlScriptDom.csproj -c Debug + ``` +3. **Copy "Actual" output** from test failure into baseline file +4. **Verify formatting** matches parser's standard formatting + +**Baseline Example** (JSON index): +```sql +CREATE JSON INDEX IX_JSON_Basic +ON dbo.Users (JsonData); + +CREATE JSON INDEX IX_JSON_Paths +ON dbo.Users (JsonData) FOR ('$.name', '$.email', '$.age'); + +CREATE JSON INDEX IX_JSON_Options +ON dbo.Users (JsonData) WITH (OPTIMIZE_FOR_ARRAY_SEARCH = ON, MAXDOP = 4); +``` + +### 10. Configure Test Expectations + +Add test configuration to version-specific test class: + +```csharp +// Location: Test/SqlDom/Only170SyntaxTests.cs (or appropriate version) +// Add to the ParserTest170 array + +new ParserTest170("YourTypeIndexTests170.sql"), +``` + +**Error Count Guidelines**: +- **Count all syntax instances**: Each CREATE INDEX statement in test script +- **Consider SQL version support**: When was the index type actually introduced? +- **Account for options**: New index options may add additional errors in older versions +- **Test validation**: Run tests to verify error counts are accurate + +### 11. 
Full Test Suite Validation + +Run complete test suite to ensure no regressions: + +```bash +# Run all ScriptDOM tests +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug + +# Expected result: All tests pass, including new index type tests +# Total tests: 1,100+ (number increases with new features) +``` + +**Regression Prevention**: +- **Grammar changes can break existing functionality**: Shared rules affect multiple contexts +- **AST changes can break script generation**: Ensure all visitors are updated +- **Index option additions**: New options shouldn't conflict with existing ones +- **Version compatibility**: New syntax shouldn't break older version parsers + +## Advanced Considerations + +### Version-Specific Implementation + +For index types introduced in specific SQL Server versions: + +```antlr +// Different grammar files for different SQL versions +// TSql160.g - SQL Server 2022 features (if backporting) +// TSql170.g - SQL Server 2025 features +// TSqlFabricDW.g - Azure Synapse features +``` + +**Guidelines**: +- **Target appropriate version**: Add to the SQL version where feature was introduced +- **Cascade to later versions**: Copy rules to all subsequent version grammar files +- **Version-specific testing**: Test error behavior in older parsers + +### Complex Index Options + +For index types requiring specialized index options: + +```xml + + + + +``` + +**When to use complex options**: +- **Type-specific options**: Options that only apply to your index type +- **Complex values**: Options with structured values or multiple parameters +- **Validation requirements**: Options that need special validation logic + +### Filegroup and Partition Support + +For index types that support filegroups or partitioning: + +```antlr +// Add filegroup support to your grammar rule +( + On vFileGroupOrPartitionScheme=filegroupOrPartitionScheme + { + vResult.OnFileGroupOrPartitionScheme = vFileGroupOrPartitionScheme; + } +)? 
+``` + +**Filegroup considerations**: +- **Optional support**: Not all index types support filegroups +- **Partition schemes**: Some index types may support partitioning +- **Storage options**: Consider FILESTREAM or in-memory storage + +## Common Pitfalls and Solutions + +### 1. Forgetting Script Generator Implementation +**Problem**: AST node created but no script generation visitor +**Solution**: Always implement `ExplicitVisit()` method for new index statement nodes + +### 2. Incorrect Grammar Integration +**Problem**: Index type not recognized in all CREATE INDEX contexts +**Solution**: Ensure integration with both `createIndexStatement` and `ddlStatement` rules + +### 3. Missing Index Option Registration +**Problem**: New index options not recognized by parser +**Solution**: Add options to `IndexOptionKind` enum and register in `IndexOptionHelper` + +### 4. Incomplete Test Coverage +**Problem**: Edge cases not covered in testing +**Solution**: Test all syntax variations, option combinations, and integration contexts + +### 5. Grammar Conflicts +**Problem**: New keywords conflict with existing grammar +**Solution**: Use proper semantic predicates and context-specific matching + +### 6. 
Version Compatibility Issues +**Problem**: New index type breaks older version parsers unexpectedly +**Solution**: Add proper version checks and test all SQL Server versions + +## Validation Checklist + +- [ ] **AST Definition**: New class inherits from `IndexStatement` with appropriate members +- [ ] **Grammar Rules**: Specialized parsing rule handles all syntax variations +- [ ] **Grammar Integration**: Connected to `createIndexStatement` and `ddlStatement` +- [ ] **String Constants**: All keywords added to `CodeGenerationSupporter.cs` +- [ ] **Index Options**: New options added to enum and registered with version flags +- [ ] **Script Generator**: `ExplicitVisit()` method generates correct T-SQL output +- [ ] **Test Scripts**: Comprehensive test coverage including edge cases +- [ ] **Baseline Files**: Generated output matches expected formatted T-SQL +- [ ] **Test Configuration**: Error counts configured for all SQL Server versions +- [ ] **Build Success**: Project builds without errors and regenerates parser +- [ ] **Full Test Suite**: All existing tests continue to pass (no regressions) + +## Related Guides + +- [bug_fixing.guidelines.instructions.md](bug_fixing.guidelines.instructions.md) - For general grammar modifications +- [new_data_types.guidelines.instructions.md](new_data_types.guidelines.instructions.md) - For adding new data types +- [validation_fix.guidelines.instructions.md](validation_fix.guidelines.instructions.md) - For validation-only issues +- [testing.guidelines.instructions.md](testing.guidelines.instructions.md) - For comprehensive testing strategies + +## Real-World Examples + +### JSON Index (SQL Server 2025) +- **AST Class**: `CreateJsonIndexStatement` with `JsonColumn` and `ForJsonPaths` members +- **Syntax**: `CREATE JSON INDEX name ON table (column) FOR (paths)` +- **Special Features**: FOR clause with path specifications, array search optimization +- **Challenge**: Multiple JSON path support in FOR clause + +### Vector Index (SQL 
Server 2025) +- **AST Class**: `CreateVectorIndexStatement` with `VectorColumn` member +- **Syntax**: `CREATE VECTOR INDEX name ON table (column) WITH (METRIC = value)` +- **Special Features**: Vector-specific metrics (cosine, dot, euclidean), DiskANN type +- **Challenge**: Specialized index options for vector operations + +### Future Examples +This pattern can be applied to other SQL Server index types like: +- **SPATIAL INDEX**: Geometry/geography data indexing +- **FULLTEXT INDEX**: Text search indexing +- **XML INDEX**: XML document indexing +- **Custom Index Types**: Future SQL Server indexing technologies + +The JSON and Vector implementations serve as canonical examples for this pattern and should be referenced for similar future index type additions. \ No newline at end of file diff --git a/.github/instructions/parser.guidelines.instructions.md b/.github/instructions/parser.guidelines.instructions.md new file mode 100644 index 0000000..20da2e6 --- /dev/null +++ b/.github/instructions/parser.guidelines.instructions.md @@ -0,0 +1,164 @@ +# Parser Predicate Recognition Bug Fix Guide + +This guide documents the specific pattern for fixing bugs where identifier-based predicates (like `REGEXP_LIKE`) are not properly recognized when wrapped in parentheses in boolean expressions. + +## Problem Description + +**Symptom**: Parentheses around identifier-based boolean predicates cause syntax errors. +- Example: `SELECT 1 WHERE (REGEXP_LIKE('a', 'pattern'))` fails to parse +- Works: `SELECT 1 WHERE REGEXP_LIKE('a', 'pattern')` (without parentheses) + +**Root Cause**: The `IsNextRuleBooleanParenthesis()` function in `TSql80ParserBaseInternal.cs` only recognizes: +- Keyword-based predicates (tokens): `LIKE`, `BETWEEN`, `CONTAINS`, `EXISTS`, etc. 
+- One identifier-based predicate: `IIF` +- But doesn't recognize newer identifier-based predicates like `REGEXP_LIKE` + +## Understanding the Fix + +### The `IsNextRuleBooleanParenthesis()` Function + +This function determines whether parentheses contain a boolean expression vs. a scalar expression. It scans forward from a `LeftParenthesis` token looking for boolean operators or predicates. + +**Location**: `SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs` + +**Key Logic**: +```csharp +case TSql80ParserInternal.Identifier: + // if identifier is IIF + if(NextTokenMatches(CodeGenerationSupporter.IIf)) + { + ++insideIIf; + } + // ADD NEW IDENTIFIER-BASED PREDICATES HERE + break; +``` + +### The Solution Pattern + +For identifier-based boolean predicates, add detection logic in the `Identifier` case: + +```csharp +case TSql80ParserInternal.Identifier: + // if identifier is IIF + if(NextTokenMatches(CodeGenerationSupporter.IIf)) + { + ++insideIIf; + } + // if identifier is REGEXP_LIKE + else if(NextTokenMatches(CodeGenerationSupporter.RegexpLike)) + { + if (caseDepth == 0 && topmostSelect == 0 && insideIIf == 0) + { + matches = true; + loop = false; + } + } + break; +``` + +## Step-by-Step Fix Process + +### 1. Reproduce the Issue +Create a test case within the existing test framework to confirm the bug: +```csharp +[TestMethod] +public void ReproduceParenthesesIssue() +{ + var parser = new TSql170Parser(true); + var sql = "SELECT 1 WHERE (REGEXP_LIKE('a', 'pattern'));"; + var result = parser.Parse(new StringReader(sql), out var errors); + // Should fail before fix, pass after fix + Assert.AreEqual(0, errors.Count, "Should parse without errors after fix"); +} +``` + +**⚠️ IMPORTANT**: Add this test to an existing test class like `Only170SyntaxTests.cs`, **do not** create a new test project. + +### 2. 
Identify the Predicate Constant +Find the predicate identifier in `CodeGenerationSupporter`: +```csharp +// In CodeGenerationSupporter.cs +public const string RegexpLike = "REGEXP_LIKE"; +``` + +### 3. Apply the Fix +Modify `TSql80ParserBaseInternal.cs` in the `IsNextRuleBooleanParenthesis()` method: + +**File**: `SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs` +**Method**: `IsNextRuleBooleanParenthesis()` +**Location**: Around line 808, in the `case TSql80ParserInternal.Identifier:` block + +Add the predicate detection logic following the pattern shown above. + +### 4. Update Test Cases +Add test cases covering the parentheses scenario: + +**Test Script**: `Test/SqlDom/TestScripts/RegexpLikeTests170.sql` +```sql +SELECT 1 WHERE (REGEXP_LIKE('a', '%pattern%')); +``` + +**Baseline**: `Test/SqlDom/Baselines170/RegexpLikeTests170.sql` +```sql +SELECT 1 +WHERE (REGEXP_LIKE ('a', '%pattern%')); +``` + +**Test Configuration**: Update error counts in `Only170SyntaxTests.cs` if the new test cases affect older parser versions. + +### 5. Build and Verify +```bash +# Build the parser +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug + +# Run the specific test +dotnet test Test/SqlDom/UTSqlScriptDom.csproj --filter "FullyQualifiedName~SqlStudio.Tests.UTSqlScriptDom.SqlDomTests.TSql170SyntaxIn170ParserTest" -c Debug +``` + +## When to Apply This Pattern + +This fix pattern applies when: + +1. **Identifier-based predicates**: The predicate is defined as an identifier (not a keyword token) +2. **Boolean context**: The predicate returns a boolean value for use in WHERE clauses, CHECK constraints, etc. +3. **Parentheses fail**: The predicate works without parentheses but fails with parentheses +4. 
**Already implemented**: The predicate grammar and AST are already correctly implemented
+
+## Common Predicates That May Need This Fix
+
+- `REGEXP_LIKE` (✅ Fixed)
+- Future identifier-based boolean functions
+- Custom function predicates that return boolean values
+
+## Related Files Modified
+
+This type of fix typically involves:
+
+1. **Core Parser Logic**:
+   - `SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs` - Main fix
+
+2. **Test Infrastructure**:
+   - `Test/SqlDom/TestScripts/[TestName].sql` - Input test cases
+   - `Test/SqlDom/Baselines[Version]/[TestName].sql` - Expected output
+   - `Test/SqlDom/Only[Version]SyntaxTests.cs` - Test configuration
+
+3. **Potentially Affected**:
+   - `Test/SqlDom/TestScripts/BooleanExpressionTests.sql` - May need additional test cases
+   - `Test/SqlDom/BaselinesCommon/BooleanExpressionTests.sql` - Corresponding baselines
+
+## Verification Checklist
+
+- [ ] Parentheses syntax parses without errors
+- [ ] Non-parentheses syntax still works
+- [ ] Test suite passes for target SQL version
+- [ ] Older SQL versions have appropriate error counts
+- [ ] Related boolean expression tests still pass
+
+## Notes and Gotchas
+
+- **IIF Special Handling**: `IIF` has special logic (`++insideIIf`) because it's not a simple boolean predicate
+- **Context Conditions**: The fix includes conditions (`caseDepth == 0 && topmostSelect == 0 && insideIIf == 0`) to ensure proper parsing context
+- **Token vs Identifier**: Keyword predicates are handled as tokens, identifier predicates need special detection
+- **Cross-Version Impact**: Adding test cases may increase error counts for older SQL Server parsers
+
+This pattern ensures that identifier-based boolean predicates work consistently with parentheses, maintaining parser compatibility across different syntactic contexts.
\ No newline at end of file diff --git a/.github/instructions/testing.guidelines.instructions.md b/.github/instructions/testing.guidelines.instructions.md new file mode 100644 index 0000000..ec62811 --- /dev/null +++ b/.github/instructions/testing.guidelines.instructions.md @@ -0,0 +1,814 @@ +# Testing Guidelines for SqlScriptDOM + +This guide provides comprehensive instructions for adding and running tests in the SqlScriptDOM parser, based on the testing framework patterns and best practices. + +## Overview + +**CRITICAL: YOU MUST ADD UNIT TESTS - DO NOT CREATE STANDALONE PROGRAMS TO TEST** + +The SqlScriptDOM testing framework validates parser functionality through: +1. **Parse β†’ Generate β†’ Parse Round-trip Testing** - Ensures syntax is correctly parsed and regenerated +2. **Baseline Comparison** - Verifies generated T-SQL matches expected formatted output +3. **Error Count Validation** - Confirms expected parse errors for invalid syntax across SQL versions +4. **Version-Specific Testing** - Tests syntax across multiple SQL Server versions (SQL 2000-2025) +5. 
**Exact T-SQL Verification** - When testing specific T-SQL syntax from prompts or user requests, the **exact T-SQL statement must be included and verified** in the test to ensure the specific syntax works as expected
+
+## Quick Verification Tests
+
+For rapid verification and debugging, add simple test methods directly to existing test classes:
+
+```csharp
+[TestMethod]
+[Priority(0)]
+[SqlStudioTestCategory(Category.UnitTest)]
+public void VerifyMyNewSyntax()
+{
+    var parser = new TSql170Parser(true);
+
+    // Test basic syntax
+    var query1 = "SELECT YOUR_NEW_FUNCTION('param1', 'param2');";
+    var result1 = parser.Parse(new StringReader(query1), out var errors1);
+    Assert.AreEqual(0, errors1.Count, "Basic syntax should parse");
+
+    // Test complex variations
+    var query2 = "SELECT YOUR_NEW_FUNCTION(@variable);";
+    var result2 = parser.Parse(new StringReader(query2), out var errors2);
+    Assert.AreEqual(0, errors2.Count, "Variable syntax should parse");
+
+    Console.WriteLine("✅ All tests passed!");
+}
+```
+
+**Where to Add Quick Tests:**
+- **SQL Server 2025 (170) features**: Add to `Test/SqlDom/Only170SyntaxTests.cs`
+- **SQL Server 2022 (160) features**: Add to `Test/SqlDom/Only160SyntaxTests.cs`
+- **Earlier versions**: Add to corresponding `Only<Version>SyntaxTests.cs`
+
+**When to Use:**
+- Quick verification during development
+- Debugging parser issues
+- Initial syntax validation before full test suite
+- Rapid prototyping of test cases
+
+## Test Framework Architecture
+
+### Core Components
+
+- **Test Scripts** (`Test/SqlDom/TestScripts/`) - Input T-SQL files containing syntax to test
+- **Baselines** (`Test/SqlDom/Baselines<Version>/`) - Expected formatted output for each test script
+- **Test Configuration** (`Test/SqlDom/Only<Version>SyntaxTests.cs`) - Test definitions with error expectations
+- **Test Runners** - MSTest framework running parse/generate/validate cycles
+
+### How Tests Work
+
+1. 
**Parse Phase**: Test script is parsed using specified SQL Server version parser +2. **Generate Phase**: Parsed AST is converted back to T-SQL using script generator +3. **Validate Phase**: Generated output is compared against baseline file +4. **Error Validation**: Parse error count is compared against expected error count for each SQL version + +## ❌ Anti-Patterns: What NOT to Do + +### Do NOT Create New Test Projects + +- ❌ **Don't create new `.csproj` files for testing** +- ❌ **Don't create console applications** like `TestVectorParser.csproj` or `debug_complex.csproj` +- ❌ **Don't create standalone test runners** +- ❌ **Don't add new projects to the solution for testing** + +### Why This Causes Problems + +1. **Build Issues**: New projects often fail to build due to missing dependencies +2. **Integration Problems**: Standalone projects don't integrate with existing test infrastructure +3. **Maintenance Overhead**: Additional projects require separate maintenance and documentation +4. **CI/CD Conflicts**: Build pipelines aren't configured for ad-hoc test projects +5. **Resource Waste**: Creates duplicate testing infrastructure instead of using established patterns + +### The Correct Approach + +βœ… **Always add test methods to existing test classes**: +- Add to `Test/SqlDom/Only170SyntaxTests.cs` for SQL Server 2025 features +- Add to `Test/SqlDom/Only160SyntaxTests.cs` for SQL Server 2022 features +- Use the established test framework patterns documented in this guide + +## Adding New Tests + +### 1. 
Create Test Script + +Create a new `.sql` file in `Test/SqlDom/TestScripts/` with descriptive name: + +**File**: `Test/SqlDom/TestScripts/YourFeatureTests160.sql` +```sql +-- Test basic syntax +SELECT JSON_ARRAY('value1', 'value2'); + +-- Test in complex context +ALTER FUNCTION TestFunction() +RETURNS NVARCHAR(MAX) +AS +BEGIN + RETURN (JSON_ARRAY('name', 'value')); +END; +GO + +-- Test edge cases +SELECT JSON_ARRAY(); +SELECT JSON_ARRAY(NULL, 'test', 123); +``` + +**CRITICAL**: When testing specific T-SQL syntax from user prompts or requests, **include the exact T-SQL statement provided** in your test script. Do not modify, simplify, or generalize the syntax - test the precise statement that was requested. + +**Example**: If the user provides: +```sql +SELECT JSON_OBJECTAGG( t.c1 : t.c2 ) +FROM ( + VALUES('key1', 'c'), ('key2', 'b'), ('key3','a') +) AS t(c1, c2); +``` + +Then your test **must include exactly that statement** to verify the specific syntax works. + +**Naming Convention**: +- `[FeatureName]Tests[VersionNumber].sql` (e.g., `JsonFunctionTests160.sql`) +- `[StatementName]Tests[VersionNumber].sql` (e.g., `CreateTableTests170.sql`) +- Use version number corresponding to SQL Server version where feature was introduced + +### 2. Create Baseline File + +Create corresponding baseline file in version-specific baseline directory: + +**File**: `Test/SqlDom/Baselines160/YourFeatureTests160.sql` + +**Initial Creation**: +1. Create empty or placeholder baseline file first +2. Run the test (it will fail) +3. Copy "Actual" output from test failure message +4.
Paste into baseline file with proper formatting + +**Example Baseline**: +```sql +SELECT JSON_ARRAY ('value1', 'value2'); + +ALTER FUNCTION TestFunction +( ) +RETURNS NVARCHAR (MAX) +AS +BEGIN + RETURN (JSON_ARRAY ('name', 'value')); +END + +GO + +SELECT JSON_ARRAY (); +SELECT JSON_ARRAY (NULL, 'test', 123); +``` + +**Formatting Notes**: +- Parser adds consistent spacing around parentheses and operators +- GO statements are preserved +- Indentation follows parser's formatting rules + +### 3. Configure Test Entry + +Add test configuration to appropriate `OnlySyntaxTests.cs` file: + +**File**: `Test/SqlDom/Only160SyntaxTests.cs` +```csharp +// Around line where other ParserTest160 entries are defined + +// Option 1: Simplified - only specify error counts you care about +new ParserTest160("YourFeatureTests160.sql"), // All previous versions default to null (ignored), TSql160 expects 0 errors + +// Option 2: Specify only some previous version error counts +new ParserTest160("YourFeatureTests160.sql", nErrors80: 1, nErrors90: 1), // Only SQL 2000/2005 expect errors + +// Option 3: Full specification (legacy compatibility) +new ParserTest160("YourFeatureTests160.sql", + nErrors80: 1, // SQL Server 2000 - expect error for new syntax + nErrors90: 1, // SQL Server 2005 - expect error for new syntax + nErrors100: 1, // SQL Server 2008 - expect error for new syntax + nErrors110: 1, // SQL Server 2012 - expect error for new syntax + nErrors120: 1, // SQL Server 2014 - expect error for new syntax + nErrors130: 1, // SQL Server 2016 - expect error for new syntax + nErrors140: 1, // SQL Server 2017 - expect error for new syntax + nErrors150: 1 // SQL Server 2019 - expect error for new syntax + // nErrors160: 0 is implicit for SQL Server 2022 - expect success +), +``` + +**Error Count Guidelines**: +- **0 errors**: Syntax should parse successfully in this SQL version +- **1+ errors**: Syntax should fail with specified number of parse errors +- **null (default)**: Error count is 
ignored for this SQL version - test will pass regardless of actual error count +- **Consider SQL version compatibility**: When was the feature introduced? + +**New Simplified Approach**: ParserTest160 (and later versions) use nullable parameters with default values of `null`. This means: +- You only need to specify error counts for versions where you expect specific behavior +- Unspecified parameters default to `null` and their error counts are ignored +- TSql160 parser (current version) always expects 0 errors unless syntax is intentionally invalid + +### 4. Run and Validate Test + +#### Run Specific Test +```bash +# Run specific test method +dotnet test Test/SqlDom/UTSqlScriptDom.csproj --filter "FullyQualifiedName~TSql160SyntaxIn160ParserTest" -c Debug + +# Run tests for specific version +dotnet test Test/SqlDom/UTSqlScriptDom.csproj --filter "TestCategory=TSql160" -c Debug +``` + +#### Run Full Test Suite +```bash +# Run complete test suite (recommended for final validation) +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug +``` + +#### Interpret Results +- βœ… **Success**: Generated output matches baseline, error counts match expectations +- ❌ **Failure**: Review actual vs expected output, adjust baseline or fix grammar +- ⚠️ **Baseline Mismatch**: Copy correct "Actual" output to baseline file +- ⚠️ **Error Count Mismatch**: Adjust error expectations in test configuration + +## Test Categories and Patterns + +### Version-Specific Tests + +Each SQL version has its own test class: +- `TSql80SyntaxTests` - SQL Server 2000 +- `TSql90SyntaxTests` - SQL Server 2005 +- `TSql100SyntaxTests` - SQL Server 2008 +- `TSql110SyntaxTests` - SQL Server 2012 +- `TSql120SyntaxTests` - SQL Server 2014 +- `TSql130SyntaxTests` - SQL Server 2016 +- `TSql140SyntaxTests` - SQL Server 2017 +- `TSql150SyntaxTests` - SQL Server 2019 +- `TSql160SyntaxTests` - SQL Server 2022 +- `TSql170SyntaxTests` - SQL Server 2025 + +### Cross-Version Testing + +When you add a test to 
`Only160SyntaxTests.cs`, the framework automatically runs it against all SQL parsers: +- `TSql160SyntaxIn160ParserTest` - Parse with SQL 2022 parser (should succeed) +- `TSql160SyntaxIn150ParserTest` - Parse with SQL 2019 parser (may fail for new syntax) +- `TSql160SyntaxIn140ParserTest` - Parse with SQL 2017 parser (may fail for new syntax) +- ... and so on for all versions + +### Positive vs Negative Testing Strategy + +**CRITICAL**: When adding new T-SQL syntax, you must implement **both positive and negative tests**: + +#### Positive Tests (Success Cases) +- **Location**: `Test/SqlDom/OnlySyntaxTests.cs` +- **Purpose**: Verify syntax parses correctly and generates expected T-SQL +- **Pattern**: Round-trip testing (Parse β†’ Generate β†’ Compare baseline) + +#### Negative Tests (Error Cases) +- **Location**: `Test/SqlDom/ParserErrorsTests.cs` +- **Purpose**: Verify invalid syntax produces expected parse errors +- **Pattern**: Direct error validation with specific error codes and messages + +### Common Test Patterns + +#### Function Tests +```sql +-- Basic function call +SELECT YOUR_FUNCTION('param'); + +-- Function in different contexts +SELECT col1, YOUR_FUNCTION('param') AS computed FROM table1; +WHERE YOUR_FUNCTION('param') > 0; + +-- Function in RETURN statements (critical test) +ALTER FUNCTION Test() RETURNS NVARCHAR(MAX) AS BEGIN + RETURN (YOUR_FUNCTION('value')); +END; +``` + +#### Statement Tests +```sql +-- Basic statement +YOUR_STATEMENT option1, option2; + +-- With expressions +YOUR_STATEMENT @variable, 'literal', column_name; + +-- Complex nested scenarios +YOUR_STATEMENT ( + SELECT nested FROM table + WHERE condition = YOUR_FUNCTION('test') +); +``` + +#### Error Condition Tests +```sql +-- Invalid syntax that should produce parse errors +YOUR_STATEMENT INVALID SYNTAX HERE; + +-- Incomplete statements +YOUR_STATEMENT MISSING; +``` + +## Test Debugging and Troubleshooting + +### Common Issues + +#### 1. Baseline Mismatch +``` +Assert.AreEqual failed. 
Expected output does not match actual output. +Actual: 'SELECT JSON_ARRAY ('value1', 'value2');' +Expected: 'SELECT JSON_ARRAY('value1', 'value2');' +``` + +**Solution**: Copy the "Actual" output to your baseline file (note spacing differences). + +#### 2. Error Count Mismatch +``` +TestYourFeature.sql: number of errors after parsing is different from expected. +Expected: 1, Actual: 0 +``` + +**Solutions**: +- **If Actual < Expected**: Grammar now supports syntax in older versions β†’ Update error counts +- **If Actual > Expected**: Grammar has issues β†’ Fix grammar or adjust test + +#### 3. Parse Errors +``` +SQL46010: Incorrect syntax near 'YOUR_TOKEN'. at offset 45, line 2, column 15 +``` + +**Solutions**: +- Check grammar rules for your syntax +- Verify syntactic predicates are in correct order +- For RETURN statement issues, see [Function Guidelines](function.guidelines.instructions.md) + +#### 4. Missing Baseline Files +``` +System.IO.FileNotFoundException: Could not find file 'Baselines160\YourTest.sql' +``` + +**Solution**: Create the baseline file in correct directory with exact same name as test script. + +### Debugging Steps + +1. **Check File Names**: Ensure test script and baseline have identical names +2. **Verify File Location**: Scripts in `TestScripts/`, baselines in `Baselines/` +3. **Run Single Test**: Isolate issue by running specific test method +4. **Check Grammar**: Ensure grammar rules support your syntax +5. **Validate AST**: Verify AST nodes are properly generated +6. 
**Test Round-trip**: Parse β†’ Generate β†’ Parse should succeed + +## Best Practices + +### Test Design + +#### Comprehensive Coverage +```sql +-- βœ… Good: Covers multiple scenarios +SELECT JSON_ARRAY('simple'); +SELECT JSON_ARRAY('multiple', 'values', 123); +SELECT JSON_ARRAY(NULL); +SELECT JSON_ARRAY(); +SELECT JSON_ARRAY(@variable); +SELECT JSON_ARRAY(column_name); +ALTER FUNCTION Test() RETURNS NVARCHAR(MAX) AS BEGIN + RETURN (JSON_ARRAY('in_return')); +END; +``` + +**CRITICAL**: When testing syntax from user requests, **always include the exact T-SQL provided**: +```sql +-- βœ… Include the exact syntax from user prompt +SELECT JSON_OBJECTAGG( t.c1 : t.c2 ) +FROM ( + VALUES('key1', 'c'), ('key2', 'b'), ('key3','a') +) AS t(c1, c2); + +-- βœ… Then add additional test variations +SELECT JSON_OBJECTAGG( alias.col1 : alias.col2 ) FROM table_name alias; +SELECT JSON_OBJECTAGG( schema.table.col1 : schema.table.col2 ) FROM schema.table; +``` + +#### Focused Testing +```sql +-- ❌ Avoid: Mixing unrelated syntax in single test +SELECT JSON_ARRAY('test'); +CREATE TABLE test_table (id INT); -- Unrelated to JSON +INSERT INTO test_table VALUES (1); -- Unrelated to JSON +``` + +#### Edge Cases +```sql +-- βœ… Include edge cases +SELECT JSON_ARRAY(); -- Empty parameters +SELECT JSON_ARRAY(NULL, NULL); -- NULL handling +SELECT JSON_ARRAY('very_long_string_value_that_tests_parser_limits'); +SELECT JSON_ARRAY((SELECT nested FROM table)); -- Subqueries +``` + +### Error Expectations + +#### Version Compatibility +```csharp +// βœ… Good: Simplified - most new syntax fails in older versions +new ParserTest160("JsonTests160.sql"), // TSql160 expects success, older versions ignored + +// βœ… Good: Specify only when you need specific behavior +new ParserTest160("JsonTests160.sql", nErrors130: 0), // JSON supported since SQL 2016 + +// βœ… Good: Full specification when needed for precision +new ParserTest160("JsonTests160.sql", + nErrors80: 1, // JSON not in SQL 2000 + nErrors90: 1, 
// JSON not in SQL 2005 + // ... + nErrors150: 1, // JSON not in SQL 2019 + // nErrors160: 0 - JSON supported in SQL 2022 +), +``` + +#### Grammar Reality +```csharp +// ⚠️ Consider: Grammar changes may affect all versions +// If shared grammar makes function work in all SQL versions: +new ParserTest160("TestFunction160.sql"), // All versions will succeed + +// If function fails in older versions due to grammar limitations: +new ParserTest160("TestFunction160.sql", nErrors80: 1, nErrors90: 1), // Only specify versions that fail +``` + +### File Organization + +#### Logical Grouping +``` +TestScripts/ +β”œβ”€β”€ JsonFunctionTests160.sql # JSON-specific functions +β”œβ”€β”€ StringFunctionTests160.sql # String manipulation +β”œβ”€β”€ CreateTableTests170.sql # DDL statements +β”œβ”€β”€ SelectStatementTests170.sql # DML statements +└── AlterFunctionTests160.sql # Function-specific syntax +``` + +#### Version Alignment +``` +TestScripts/JsonTests160.sql ↔ Baselines160/JsonTests160.sql +TestScripts/JsonTests170.sql ↔ Baselines170/JsonTests170.sql +``` + +## Simplified Error Count Handling (TSql160+) + +### New Constructor Behavior + +Starting with `ParserTest160`, the constructor uses nullable integer parameters with default values of `null`. This pattern extends to later versions like `ParserTest170`: + +```csharp +public ParserTest160(string scriptFilename, + int? nErrors80 = null, // Default: null (ignored) + int? nErrors90 = null, // Default: null (ignored) + int? nErrors100 = null, // Default: null (ignored) + int? nErrors110 = null, // Default: null (ignored) + int? nErrors120 = null, // Default: null (ignored) + int? nErrors130 = null, // Default: null (ignored) + int? nErrors140 = null, // Default: null (ignored) + int? nErrors150 = null) // Default: null (ignored) + // TSql160 always expects 0 errors unless syntax is invalid + +public ParserTest170(string scriptFilename, + int? nErrors80 = null, // Default: null (ignored) + int? 
nErrors90 = null, // Default: null (ignored) + int? nErrors100 = null, // Default: null (ignored) + int? nErrors110 = null, // Default: null (ignored) + int? nErrors120 = null, // Default: null (ignored) + int? nErrors130 = null, // Default: null (ignored) + int? nErrors140 = null, // Default: null (ignored) + int? nErrors150 = null, // Default: null (ignored) + int? nErrors160 = null) // Default: null (ignored) + // TSql170 always expects 0 errors unless syntax is invalid +``` + +### Benefits + +1. **Simplified Test Creation**: Most tests only need the script filename +2. **Focus on What Matters**: Only specify error counts for versions where you expect specific behavior +3. **Reduced Maintenance**: No need to update all error counts when adding version-agnostic syntax +4. **Backward Compatibility**: Existing tests with full error specifications still work + +### Usage Patterns + +```csharp +// Minimal - test new SQL 2022 syntax +new ParserTest160("NewFeatureTests160.sql"), + +// Minimal - test new SQL 2025 syntax +new ParserTest170("NewFeatureTests170.sql"), + +// Specify only critical version boundaries +new ParserTest160("FeatureTests160.sql", nErrors130: 0), // Supported since SQL 2016 +new ParserTest170("FeatureTests170.sql", nErrors130: 0), // Supported since SQL 2016 + +// Mix of specified and default parameters +new ParserTest160("EdgeCaseTests160.sql", nErrors80: 2, nErrors150: 1), // SQL 2000 has 2 errors, SQL 2019 has 1 +new ParserTest170("EdgeCaseTests170.sql", nErrors80: 2, nErrors160: 1), // SQL 2000 has 2 errors, SQL 2022 has 1 + +// Legacy full specification still supported +new ParserTest160("LegacyTests160.sql", 1, 1, 1, 1, 1, 1, 1, 1), +new ParserTest170("LegacyTests170.sql", 1, 1, 1, 1, 1, 1, 1, 1, 1), +``` + +### When to Specify Error Counts + +- **Don't specify**: When older SQL versions should be ignored (most common case for both TSql160 and TSql170) +- **Specify as 0**: When feature was introduced in a specific older SQL version +- 
**Specify as 1+**: When you need to validate specific error conditions +- **Specify for debugging**: When investigating cross-version compatibility issues +- **TSql170 considerations**: Remember that TSql160 (SQL Server 2022) is now also a "previous version" when using ParserTest170 + +## Performance Considerations + +### Test Execution Time + +#### Minimal Test Sets +```bash +# Run specific version tests only +dotnet test --filter "TestCategory=TSql160" -c Debug + +# Run specific feature tests +dotnet test --filter "FullyQualifiedName~Json" -c Debug +``` + +#### Parallel Execution +```bash +# Use parallel test execution for faster runs +dotnet test --parallel -c Debug +``` + +#### Focused Development +```bash +# During development, run only your new tests +dotnet test --filter "FullyQualifiedName~YourTestMethod" -c Debug +``` + +### Build Performance + +#### Incremental Testing +1. Add test script and baseline +2. Run specific test to validate +3. Run full test suite only before commit + +#### Cached Builds +- Parser regeneration only occurs when grammar files change +- Test compilation is incremental +- Use `-c Debug` for faster iteration + +## Integration with Development Workflow + +### 1. Grammar Development +```bash +# After grammar changes +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug + +# Test specific functionality +dotnet test --filter "FullyQualifiedName~YourFeature" -c Debug +``` + +### 2. Test-Driven Development +```bash +# 1. Create failing test +dotnet test --filter "FullyQualifiedName~YourNewTest" -c Debug # Should fail + +# 2. Implement grammar changes +dotnet build -c Debug + +# 3. Update baseline and validate +dotnet test --filter "FullyQualifiedName~YourNewTest" -c Debug # Should pass + +# 4. Run regression tests +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug # Should all pass +``` + +### 3. 
Continuous Integration +```bash +# Full validation before commit +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug +# Ensure: Total tests: 1,116, Failed: 0, Succeeded: 1,116 +``` + +## Common Test Scenarios + +### Adding New Function + +```sql +-- Test/SqlDom/TestScripts/NewFunctionTests160.sql (for SQL 2022) +-- Test/SqlDom/TestScripts/NewFunctionTests170.sql (for SQL 2025) +SELECT NEW_FUNCTION('param1', 'param2'); +SELECT NEW_FUNCTION(@variable); +SELECT NEW_FUNCTION(column_name); + +-- Critical: Test in RETURN statement +ALTER FUNCTION TestNewFunction() +RETURNS NVARCHAR(MAX) +AS +BEGIN + RETURN (NEW_FUNCTION('test_value')); +END; +GO +``` + +**Test Configuration**: +```csharp +// Simplified approach for SQL 2022 - NEW_FUNCTION is SQL 2022 syntax +new ParserTest160("NewFunctionTests160.sql"), + +// Simplified approach for SQL 2025 - NEW_FUNCTION is SQL 2025 syntax +new ParserTest170("NewFunctionTests170.sql"), + +// Or specify if function works in earlier versions +new ParserTest160("NewFunctionTests160.sql", nErrors140: 0), // Works since SQL 2017 +new ParserTest170("NewFunctionTests170.sql", nErrors140: 0), // Works since SQL 2017 +``` + +### Adding New Statement + +```sql +-- Test/SqlDom/TestScripts/NewStatementTests160.sql (for SQL 2022) +-- Test/SqlDom/TestScripts/NewStatementTests170.sql (for SQL 2025) +NEW_STATEMENT option1 = 'value1', option2 = 'value2'; + +NEW_STATEMENT + option1 = 'value1', + option2 = @parameter, + option3 = (SELECT nested FROM table); + +-- Test with expressions +NEW_STATEMENT computed_option = (value1 + value2); +``` + +**Test Configuration**: +```csharp +// For SQL 2022 syntax: +new ParserTest160("NewStatementTests160.sql"), + +// For SQL 2025 syntax: +new ParserTest170("NewStatementTests170.sql"), +``` + +### Testing Error Conditions + +```sql +-- Test/SqlDom/TestScripts/ErrorConditionTests160.sql +-- These should generate parse errors + +NEW_FUNCTION(); -- Invalid: missing required parameters +NEW_FUNCTION('param1',); -- 
Invalid: trailing comma +NEW_FUNCTION('param1' 'param2'); -- Invalid: missing comma +``` + +**Test Configuration**: +```csharp +// Test should fail parsing in TSql160 due to invalid syntax +new ParserTest160("ErrorConditionTests160.sql", + nErrors80: 3, // 3 syntax errors expected in all versions + nErrors90: 3, + nErrors100: 3, + nErrors110: 3, + nErrors120: 3, + nErrors130: 3, + nErrors140: 3, + nErrors150: 3, + nErrors160: 3), // Even TSql160 should have 3 errors - syntax is invalid + +// Test should fail parsing in TSql170 due to invalid syntax +new ParserTest170("ErrorConditionTests170.sql", + nErrors80: 3, // 3 syntax errors expected in all versions + nErrors90: 3, + nErrors100: 3, + nErrors110: 3, + nErrors120: 3, + nErrors130: 3, + nErrors140: 3, + nErrors150: 3, + nErrors160: 3, + nErrors170: 3), // Even TSql170 should have 3 errors - syntax is invalid + +// Or simplified if error count is same across all versions: +new ParserTest160("ErrorConditionTests160.sql", + nErrors80: 3, nErrors90: 3, nErrors100: 3, nErrors110: 3, + nErrors120: 3, nErrors130: 3, nErrors140: 3, nErrors150: 3, + nErrors160: 3), + +new ParserTest170("ErrorConditionTests170.sql", + nErrors80: 3, nErrors90: 3, nErrors100: 3, nErrors110: 3, + nErrors120: 3, nErrors130: 3, nErrors140: 3, nErrors150: 3, + nErrors160: 3, nErrors170: 3), +``` + +## Real-World Example: VECTOR Parsing Verification + +This example shows the correct approach used to verify VECTOR data type parsing functionality: + +```csharp +[TestMethod] +[Priority(0)] +[SqlStudioTestCategory(Category.UnitTest)] +public void VerifyComplexQueryFix() +{ + // Test VECTOR parsing in various contexts - this is the real bug we found and fixed + var parser = new TSql170Parser(true); + + // Test 1: Basic VECTOR with base type + var query1 = "SELECT CAST('[1,2,3]' AS VECTOR(3, Float32));"; + var result1 = parser.Parse(new StringReader(query1), out var errors1); + Assert.AreEqual(0, errors1.Count, "Basic VECTOR with base type should 
parse"); + + // Test 2: VECTOR in complex CAST (from original failing query) + var query2 = "SELECT CAST('[-6.464173E+08,1.040823E+07,1.699169E+08]' AS VECTOR(3, Float32));"; + var result2 = parser.Parse(new StringReader(query2), out var errors2); + Assert.AreEqual(0, errors2.Count, "VECTOR with scientific notation should parse"); + + // Test 3: VECTOR in CONVERT (from original failing query) + var query3 = "SELECT CONVERT(VECTOR(77), '[-7.230808E+08,4.075427E+08]');"; + var result3 = parser.Parse(new StringReader(query3), out var errors3); + Assert.AreEqual(0, errors3.Count, "VECTOR in CONVERT should parse"); + + // Test 4: VECTOR in JOIN context (simplified version of original complex query) + var query4 = @"SELECT t1.id + FROM table1 t1 + INNER JOIN table2 t2 ON t1.vector_col = CAST('[1,2,3]' AS VECTOR(3, Float32));"; + var result4 = parser.Parse(new StringReader(query4), out var errors4); + Assert.AreEqual(0, errors4.Count, "VECTOR in JOIN condition should parse"); + + Console.WriteLine("βœ… All VECTOR parsing tests passed - the original VECTOR bug is fixed!"); +} +``` + +**Key Points from This Example:** +1. Test was added directly to `Only170SyntaxTests.cs` - no new project created +2. Tests multiple contexts where the syntax appears (CAST, CONVERT, JOIN) +3. Uses inline assertions with descriptive messages +4. References the original bug being fixed +5. 
Provides immediate feedback via Console.WriteLine + +## Advanced Testing Patterns + +### Multi-File Tests + +For complex scenarios requiring multiple related test files: +``` +TestScripts/ +β”œβ”€β”€ ComplexScenarioTests160_Part1.sql +β”œβ”€β”€ ComplexScenarioTests160_Part2.sql +└── ComplexScenarioTests160_Integration.sql + +Baselines160/ +β”œβ”€β”€ ComplexScenarioTests160_Part1.sql +β”œβ”€β”€ ComplexScenarioTests160_Part2.sql +└── ComplexScenarioTests160_Integration.sql +``` + +### Version Migration Tests + +Testing syntax evolution across versions: +```sql +-- Test/SqlDom/TestScripts/FeatureEvolutionTests170.sql +-- Tests new syntax in 170 that extends 160 functionality +SELECT JSON_ARRAY('basic'); -- Supported in 160 +SELECT JSON_ARRAY('new', 'syntax', 'in', 'version', '170'); -- New in 170 +``` + +### Regression Tests + +When fixing bugs, add specific regression tests: +```sql +-- Test/SqlDom/TestScripts/RegressionBugFix12345Tests160.sql +-- Specific test case that reproduced bug #12345 +ALTER FUNCTION TestRegression() +RETURNS NVARCHAR(MAX) +AS +BEGIN + RETURN (JSON_OBJECT('key': (SELECT value FROM table))); +END; +``` + +**Test Configuration**: +```csharp +// Regression test - should work in TSql160, may fail in earlier versions +new ParserTest160("RegressionBugFix12345Tests160.sql"), + +// Regression test - should work in TSql170, may fail in earlier versions +new ParserTest170("RegressionBugFix12345Tests170.sql"), + +// Or if you need to verify the bug existed in specific versions: +new ParserTest160("RegressionBugFix12345Tests160.sql", nErrors150: 1), // Bug existed in SQL 2019 +new ParserTest170("RegressionBugFix12345Tests170.sql", nErrors160: 1), // Bug existed in SQL 2022 +``` + +## Summary + +The SqlScriptDOM testing framework provides comprehensive validation of parser functionality through: +- **Round-trip testing** (Parse β†’ Generate β†’ Parse) +- **Baseline comparison** (Generated output vs expected) +- **Cross-version validation** (Test syntax 
across SQL Server versions) +- **Error condition testing** (Invalid syntax produces expected errors) +- **Exact syntax verification** (Exact T-SQL from user requests is tested precisely) + +Following these guidelines ensures robust test coverage for parser functionality and prevents regressions when adding new features or fixing bugs. + +**Key Principle**: Always test the exact T-SQL syntax provided in user prompts or requests to verify that the specific syntax works as expected, rather than testing generalized or simplified versions of the syntax. \ No newline at end of file diff --git a/.github/prompts/new-feature-implementation.prompt.md b/.github/prompts/new-feature-implementation.prompt.md new file mode 100644 index 0000000..dd87fc9 --- /dev/null +++ b/.github/prompts/new-feature-implementation.prompt.md @@ -0,0 +1,518 @@ +# New SQL Server Feature Implementation Guide + +This prompt will identify the type of SQL Server feature you want to add to SqlScriptDOM and **automatically implement it** using the appropriate guideline. After feature type identification, it will execute the complete implementation workflow. + +## Feature Type Identification + +Please answer the following questions to determine the best implementation approach: + +### 1. What type of SQL Server feature are you implementing? 
+ +**A) Data Type** - A new SQL Server data type (e.g., VECTOR, GEOMETRY, GEOGRAPHY) +- Example: `DECLARE @embedding AS VECTOR(1536, FLOAT32)` +- Example: `CREATE TABLE tbl (geo_data GEOGRAPHY)` +- **Key indicators**: New type with custom parameters, specialized syntax for type definitions + +**B) Index Type** - A specialized index type with unique syntax (e.g., JSON INDEX, VECTOR INDEX) +- Example: `CREATE JSON INDEX IX_JSON ON table (column) FOR ('$.path1', '$.path2')` +- Example: `CREATE VECTOR INDEX IX_VECTOR ON table (column) WITH (METRIC = 'cosine')` +- **Key indicators**: CREATE [TYPE] INDEX syntax, type-specific clauses or options + +**C) System Function** - A new T-SQL built-in function (e.g., JSON_OBJECT, JSON_ARRAY) +- Example: `SELECT JSON_OBJECT('key1': 'value1', 'key2': 'value2')` +- Example: `RETURN JSON_ARRAY('item1', 'item2', 'item3')` +- **Key indicators**: Function calls in expressions, may need RETURN statement support + +**D) Grammar/Syntax Enhancement** - New operators, statements, or syntax modifications +- Example: Adding new WHERE clause predicates like `REGEXP_LIKE` +- Example: New statement types or operators +- **Key indicators**: Parser doesn't recognize syntax, needs AST updates + +**E) Validation Fix** - Existing syntax fails validation but should work per SQL Server docs +- Example: ALTER TABLE RESUMABLE option works in ALTER INDEX but not ALTER TABLE +- **Key indicators**: "Option 'X' is not valid..." errors, similar syntax works elsewhere + +**F) Parser Predicate Issue** - Identifier-based predicates fail with parentheses +- Example: `WHERE REGEXP_LIKE(...)` works but `WHERE (REGEXP_LIKE(...))` fails +- **Key indicators**: Syntax errors near closing parenthesis with identifier predicates + +### 2. 
SQL Server Feature Details + +**Feature Name**: _______________ +**SQL Server Version**: _______________ +**Example Syntax**: +```sql +-- Provide 2-3 examples of the syntax you want to support +``` + +**Current Behavior**: _______________ +**Expected Behavior**: _______________ + +### 3. Feature Characteristics + +Check all that apply to your feature: + +- [ ] Requires completely new AST node classes +- [ ] Extends existing AST nodes with new members +- [ ] Needs new grammar rules in .g files +- [ ] Requires new keywords/constants +- [ ] Needs specialized script generation logic +- [ ] Has version-specific behavior (SQL Server 2014+, 2022+, etc.) +- [ ] Includes optional syntax elements or clauses +- [ ] Supports collections/lists of parameters +- [ ] Requires new validation logic +- [ ] Needs new index options or statement options + +## AUTO-IMPLEMENTATION TRIGGER + +**To begin automatic implementation, provide your feature details in this exact format:** + +``` +Feature Name: [Your feature name] +SQL Server Version: [SQL Server version] +Exact T-SQL Syntax: +```sql +[Copy the EXACT T-SQL syntax from the user's request here] +``` +Feature Type: [Will be determined from analysis below] +``` + +**The system will then automatically identify the feature type and begin implementation.** + +## Implementation Guidance + +Based on your feature type identification below, the system will automatically execute the appropriate implementation workflow: + +### β†’ Data Type (Answer A) +**Auto-Executes**: [New Data Types Guidelines](../instructions/new_data_types.guidelines.instructions.md) + +**Automatic implementation includes**: +- Creating new `DataTypeReference` AST classes +- Adding specialized parsing rules for custom type syntax +- Implementing parameter handling (dimensions, base types, etc.) 
+- Script generation for type definitions +- Version-specific type support +- Comprehensive testing across all SQL contexts + +**Best for**: VECTOR, custom CLR types, spatial types, hierarchical types + +### β†’ Index Type (Answer B) +**Auto-Executes**: [New Index Types Guidelines](../instructions/new_index_types.guidelines.instructions.md) + +**Automatic implementation includes**: +- Creating new `IndexStatement` AST classes +- Implementing type-specific index syntax parsing +- Adding specialized clauses (FOR, WITH type-specific options) +- Index option registration and validation +- Script generation for index statements +- Integration with existing index framework + +**Best for**: JSON INDEX, VECTOR INDEX, SPATIAL INDEX, custom index types + +### β†’ System Function (Answer C) +**Auto-Executes**: [Function Guidelines](../instructions/function.guidelines.instructions.md) + +**Automatic implementation includes**: +- Function AST design for new T-SQL functions +- Grammar rules with syntactic predicates for RETURN statement support +- ANTLR v2 lookahead limitations and solutions +- Script generation for function calls +- Comprehensive testing in all expression contexts + +**Best for**: JSON_OBJECT, JSON_ARRAY, AI functions, mathematical functions + +### β†’ Grammar/Syntax Enhancement (Answer D) +**Auto-Executes**: [Bug Fixing Guidelines](../instructions/bug_fixing.guidelines.instructions.md) + +**Automatic implementation includes**: +- Grammar rule modifications and AST updates +- Script generation implementation +- Testing framework integration +- Extending literals to expressions pattern +- Version compatibility considerations + +**Best for**: New operators, statement types, expression enhancements + +### β†’ Validation Fix (Answer E) +**Auto-Executes**: [Validation Fix Guidelines](../instructions/validation_fix.guidelines.instructions.md) + +**Automatic implementation includes**: +- Version-gated validation fixes +- SQL Server version compatibility checks +- 
Context-specific validation rules +- Testing validation behavior across versions +- No grammar changes needed + +**Best for**: Feature works in one context but not another, version support issues + +### β†’ Parser Predicate Issue (Answer F) +**Auto-Executes**: [Parser Guidelines](../instructions/parser.guidelines.instructions.md) + +**Automatic implementation includes**: +- Identifier-based predicate recognition fixes +- `IsNextRuleBooleanParenthesis()` function updates +- Syntactic vs semantic predicate handling +- Parentheses support in boolean contexts + +**Best for**: Functions work without parentheses but fail with them + +## Grammar Extension Patterns + +For users implementing Grammar/Syntax Enhancement (Option D), here are common patterns: + +### Pattern 1: Extending Literals to Expressions + +#### When to Use +When existing grammar rules only accept literal values but need to support dynamic expressions like parameters, variables, or computed values. + +#### Example Problem +Functions or constructs that currently accept only: +- `IntegerLiteral` (e.g., `TOP_N = 10`) +- `StringLiteral` (e.g., `VALUE = 'literal'`) + +But need to support: +- Parameters: `@parameter` +- Variables: `@variable` +- Column references: `table.column` +- Outer references: `outerref.column` +- Function calls: `FUNCTION(args)` +- Computed expressions: `value + 1` + +#### ⚠️ Critical Warning: Avoid Modifying Shared Grammar Rules + +**DO NOT** modify existing shared grammar rules like `identifierColumnReferenceExpression` that are used throughout the codebase. This can cause unintended side effects and break other functionality. + +**Instead**, create specialized rules for your specific context. 
+ +#### Solution Template + +**Step 1: Update AST Definition (`Ast.xml`)** +```xml + + + + + +``` + +**Step 2: Create Context-Specific Grammar Rule (`TSql*.g`)** +```antlr +// Create a specialized rule for your context +yourContextColumnReferenceExpression returns [ColumnReferenceExpression vResult = this.FragmentFactory.CreateFragment()] +{ + MultiPartIdentifier vMultiPartIdentifier; +} + : + vMultiPartIdentifier=multiPartIdentifier[2] // Allows table.column syntax + { + vResult.ColumnType = ColumnType.Regular; + vResult.MultiPartIdentifier = vMultiPartIdentifier; + } + ; + +// Use the specialized rule in your custom grammar +yourContextParameterRule returns [ScalarExpression vResult] + : vResult=signedInteger + | vResult=variable + | vResult=yourContextColumnReferenceExpression // Context-specific rule + | vResult=expression // Allows computed expressions + ; +``` + +**Step 3: Verify Script Generator** +Most script generators using `GenerateNameEqualsValue()` or similar methods work automatically with `ScalarExpression`. No changes typically needed. + +#### Real-World Example: VECTOR_SEARCH TOP_N + +**Problem**: `VECTOR_SEARCH` TOP_N parameter only accepted integer literals. + +**❌ Wrong Approach**: Modify `identifierColumnReferenceExpression` to use `multiPartIdentifier[2]` +- **Result**: Broke `CreateIndexStatementErrorTest` because other grammar rules started accepting invalid syntax + +**βœ… Correct Approach**: Create `vectorSearchColumnReferenceExpression` specialized for VECTOR_SEARCH +- **Result**: VECTOR_SEARCH supports multi-part identifiers without affecting other functionality + +**Final Implementation**: +```antlr +signedIntegerOrVariableOrColumnReference returns [ScalarExpression vResult] + : vResult=signedInteger + | vResult=variable + | vResult=vectorSearchColumnReferenceExpression // VECTOR_SEARCH-specific rule + ; + +vectorSearchColumnReferenceExpression returns [ColumnReferenceExpression vResult = ...] 
+ : + vMultiPartIdentifier=multiPartIdentifier[2] // Allows table.column syntax + { + vResult.ColumnType = ColumnType.Regular; + vResult.MultiPartIdentifier = vMultiPartIdentifier; + } + ; +``` + +**Result**: Now supports dynamic TOP_N values: +```sql +-- Parameters +VECTOR_SEARCH(..., TOP_N = @k) AS ann + +-- Outer references +VECTOR_SEARCH(..., TOP_N = outerref.max_results) AS ann +``` + +### Pattern 2: Adding New Enum Members + +#### When to Use +When adding new operators, keywords, or options to existing constructs. + +#### Solution Template + +**Step 1: Update Enum in AST (`Ast.xml`)** +```xml + + + + + +``` + +**Step 2: Update Grammar Rule (`TSql*.g`)** +```antlr +// Add new token matching +| tNewValue:Identifier +{ + Match(tNewValue, CodeGenerationSupporter.NewValue); + vResult.EnumProperty = ExistingEnumType.NewValue; +} +``` + +**Step 3: Update Script Generator** +```csharp +// Add mapping in appropriate generator file +private static readonly Dictionary _enumGenerators = + new Dictionary() +{ + { EnumType.ExistingValue1, CodeGenerationSupporter.ExistingValue1 }, + { EnumType.ExistingValue2, CodeGenerationSupporter.ExistingValue2 }, + { EnumType.NewValue, CodeGenerationSupporter.NewValue }, // Add this +}; +``` + +### Pattern 3: Adding New Function or Statement + +#### When to Use +When adding completely new T-SQL functions or statements. 
+ +#### Solution Template + +**Step 1: Define AST Node (`Ast.xml`)** +```xml + + + + +``` + +**Step 2: Add Grammar Rule (`TSql*.g`)** +```antlr +newFunctionCall returns [NewFunctionCall vResult = FragmentFactory.CreateFragment()] +{ + ScalarExpression vParam1; + StringLiteral vParam2; +} + : + tFunction:Identifier LeftParenthesis + { + Match(tFunction, CodeGenerationSupporter.NewFunction); + UpdateTokenInfo(vResult, tFunction); + } + vParam1 = expression + { + vResult.Parameter1 = vParam1; + } + Comma vParam2 = stringLiteral + { + vResult.Parameter2 = vParam2; + } + RightParenthesis + ; +``` + +**Step 3: Integrate with Existing Rules** +Add the new rule to appropriate places in the grammar (e.g., `functionCall`, `primaryExpression`, etc.). + +**Step 4: Create Script Generator** +```csharp +public override void ExplicitVisit(NewFunctionCall node) +{ + GenerateIdentifier(CodeGenerationSupporter.NewFunction); + GenerateSymbol(TSqlTokenType.LeftParenthesis); + GenerateFragmentIfNotNull(node.Parameter1); + GenerateSymbol(TSqlTokenType.Comma); + GenerateFragmentIfNotNull(node.Parameter2); + GenerateSymbol(TSqlTokenType.RightParenthesis); +} +``` + +## Quick Decision Tree + +**Start here** β†’ Does the syntax exist in SQL Server documentation? + +**No** β†’ Use Grammar/Syntax Enhancement (D) + +**Yes** β†’ Does SqlScriptDOM recognize the syntax? + +**No** β†’ What type of syntax? +- Data type declaration β†’ Data Type (A) +- CREATE [TYPE] INDEX β†’ Index Type (B) +- Function call β†’ System Function (C) +- Other syntax β†’ Grammar/Syntax Enhancement (D) + +**Yes** β†’ Does it parse without errors? + +**No** β†’ Parser Predicate Issue (F) + +**Yes** β†’ Does validation reject it incorrectly? 
+ +**Yes** β†’ Validation Fix (E) + +**No** β†’ Review existing implementation or check for edge cases + +## Pre-Implementation Checklist + +Before starting implementation: + +- [ ] Verified feature exists in SQL Server documentation +- [ ] Identified target SQL Server version for the feature +- [ ] Confirmed feature doesn't already exist in SqlScriptDOM +- [ ] Collected comprehensive syntax examples from SQL Server docs +- [ ] Reviewed similar existing implementations in the codebase +- [ ] Selected appropriate guideline based on feature type + +## Testing Strategy + +Regardless of feature type, ensure you: + +- [ ] Create comprehensive test scripts covering all syntax variations +- [ ] Generate proper baseline files with expected formatted output +- [ ] Configure error counts for all SQL Server versions +- [ ] Run full test suite to prevent regressions +- [ ] Test edge cases, quoted identifiers, and schema qualification +- [ ] Verify round-trip parsing (parse β†’ generate β†’ parse) + +## Additional Resources + +- **Main Copilot Instructions**: [copilot-instructions.md](../copilot-instructions.md) +- **Testing Framework Guide**: [Testing Guidelines](../instructions/testing.guidelines.instructions.md) +- **Grammar Extension Patterns**: See Grammar Extension Patterns section above +- **Detailed Grammar Guidelines**: [Grammar Guidelines](../instructions/grammer.guidelines.instructions.md) + +--- + +**Ready to implement?** Follow the guideline that matches your feature type above. Each guide provides step-by-step instructions, real-world examples, and comprehensive testing strategies. + +## Implementation Workflow + +**IMPORTANT**: After identifying your feature type above, this prompt will automatically begin implementation. Provide the following information to start: + +### Required Information +1. **Feature Name**: _______________ +2. **SQL Server Version**: _______________ +3. 
**Exact T-SQL Syntax Examples**: +```sql +-- Provide the EXACT syntax you want to support (copy-paste from user request) +-- Example: SELECT JSON_OBJECTAGG( t.c1 : t.c2 ) FROM (VALUES('key1', 'c'), ('key2', 'b'), ('key3','a')) AS t(c1, c2); +``` +4. **Feature Type** (from analysis above): A, B, C, D, E, or F + +### Automatic Implementation Process + +Once you provide the information above, this prompt will: + +#### Phase 1: Analysis and Verification (Always Done First) +1. **Verify current status** using the exact syntax provided +2. **Search existing codebase** for similar implementations +3. **Identify SQL Server version** and parser target +4. **Create implementation plan** with specific steps +5. **Show the plan** and get confirmation before proceeding + +#### Phase 2: Implementation (Executed Automatically) +Based on feature type identification: + +**For Grammar/Syntax Enhancement (Type D)**: +1. **Update AST definition** (`Ast.xml`) if new nodes needed +2. **Add grammar rules** in appropriate `TSql*.g` files +3. **Create script generators** for new AST nodes +4. **Build and validate** parser compilation +5. **Create comprehensive tests** with exact syntax provided +6. **Generate baseline files** from parser output +7. **Run full test suite** to ensure no regressions + +**For Validation Fix (Type E)**: +1. **Locate validation function** throwing the error +2. **Verify Microsoft documentation** for version support +3. **Apply version-gated validation** (not unconditional rejection) +4. **Create test cases** covering all scenarios +5. **Build and validate** the fix +6. **Run full test suite** to ensure correctness + +**For System Function (Type C)**: +1. **Define AST node structure** for the function +2. **Add grammar rules** with syntactic predicates for RETURN statement support +3. **Create script generator** for the function +4. **Build and test** grammar changes +5. **Create comprehensive test scripts** including RETURN statement usage +6. 
**Validate full test suite** for regressions + +**For Data Type (Type A)**: +1. **Define AST node** inheriting from `DataTypeReference` +2. **Create specialized parsing rule** for the data type +3. **Integrate with scalar data type rule** +4. **Add string constants** for keywords +5. **Create script generator** +6. **Build and comprehensive test** across all SQL contexts + +**For Index Type (Type B)**: +1. **Define AST node** inheriting from `IndexStatement` +2. **Create specialized parsing rule** for the index type +3. **Integrate with main index grammar** +4. **Add index options** if needed +5. **Create script generator** +6. **Build and comprehensive test** all syntax variations + +**For Parser Predicate Issue (Type F)**: +1. **Locate `IsNextRuleBooleanParenthesis()`** function +2. **Add identifier-based predicate detection** +3. **Build and test** the fix +4. **Create tests** covering parentheses scenarios +5. **Validate** existing functionality + +#### Phase 3: Validation and Documentation +1. **Run complete test suite** (`dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug`) +2. **Verify all tests pass** (expect 1,100+ tests to succeed) +3. **Document changes made** with before/after examples +4. **Provide usage examples** showing the new functionality + +### Starting Implementation + +To begin implementation, provide your feature details using this format: + +``` +Feature Name: [FUNCTION_NAME or FEATURE_NAME] +SQL Server Version: [SQL Server 20XX / TSqlXXX] +Exact T-SQL Syntax: +```sql +[EXACT_COPY_OF_SYNTAX_FROM_USER_REQUEST] +``` +Feature Type: [A/B/C/D/E/F based on analysis above] +``` + +**The prompt will then automatically execute the appropriate implementation workflow and start making the necessary code changes.** + +### Implementation Principles + +1. **Always test exact syntax first**: Use the exact T-SQL provided, not simplified versions +2. **Follow established patterns**: Reuse existing patterns from similar implementations +3. 
**Maintain backward compatibility**: Ensure existing functionality continues to work +4. **Comprehensive testing**: Test all syntax variations, edge cases, and error conditions +5. **Version compatibility**: Consider which SQL Server versions should support the feature +6. **Full regression testing**: Always run the complete test suite before completion \ No newline at end of file diff --git a/.github/prompts/verify-and-test-tsql-syntax.prompt.md b/.github/prompts/verify-and-test-tsql-syntax.prompt.md new file mode 100644 index 0000000..95fb51f --- /dev/null +++ b/.github/prompts/verify-and-test-tsql-syntax.prompt.md @@ -0,0 +1,692 @@ +--- +title: How to Verify T-SQL Syntax Support and Add Tests +description: Step-by-step guide to check if a T-SQL syntax is already supported and how to add comprehensive test coverage +tags: [testing, verification, tsql, syntax, parser, baseline] +--- + +# How to Verify T-SQL Syntax Support and Add Tests + +This guide helps you determine if a T-SQL syntax is already supported by ScriptDOM and shows you how to add proper test coverage. + +## Step 0: Verify the Exact Script First + +**CRITICAL**: Before doing anything else, test the exact T-SQL script provided to confirm whether it works or fails. + +**IMPORTANT**: For initial verification, you MUST add a debug unit test method directly to an existing test file (like Only170SyntaxTests.cs). This is only for initial verification. Once you confirm the syntax status, you'll follow the proper testing workflow to add comprehensive test coverage. 
+ +### Step 1: Add Debug Unit Test Method + +Add this debug test method to the appropriate test file (e.g., `Test/SqlDom/Only170SyntaxTests.cs`): + +```csharp +[TestMethod] +public void DebugExactScriptTest() +{ + // PUT THE EXACT T-SQL SCRIPT HERE - DO NOT CREATE SEPARATE FILES + string script = @"SELECT Id, + DATEADD(DAY, 1, GETDATE()) +FROM Table1"; + + Console.WriteLine($"Testing exact script: {script}"); + + // Test with the target parser version first (e.g., TSql170) + TSql170Parser parser170 = new TSql170Parser(true); + IList errors170; + + using (StringReader reader = new StringReader(script)) + { + TSqlFragment fragment = parser170.Parse(reader, out errors170); + + Console.WriteLine($"\n=== TSql170 Parser Results ==="); + if (errors170.Count == 0) + { + Console.WriteLine("βœ… SUCCESS: Parsed without errors"); + + // Test script generation (round-trip) + Sql170ScriptGenerator generator = new Sql170ScriptGenerator(); + string generatedScript; + generator.GenerateScript(fragment, out generatedScript); + Console.WriteLine($"Generated: {generatedScript}"); + } + else + { + Console.WriteLine($"❌ FAILED: {errors170.Count} parse errors:"); + foreach (var error in errors170) + { + Console.WriteLine($" Line {error.Line}, Col {error.Column}: {error.Message}"); + } + } + } + + // Test with older parser version for comparison (e.g., TSql160) + TSql160Parser parser160 = new TSql160Parser(true); + IList errors160; + + using (StringReader reader = new StringReader(script)) + { + TSqlFragment fragment = parser160.Parse(reader, out errors160); + + Console.WriteLine($"\n=== TSql160 Parser Results ==="); + if (errors160.Count == 0) + { + Console.WriteLine("βœ… SUCCESS: Parsed without errors"); + } + else + { + Console.WriteLine($"❌ FAILED: {errors160.Count} parse errors:"); + foreach (var error in errors160) + { + Console.WriteLine($" Line {error.Line}, Col {error.Column}: {error.Message}"); + } + } + } + + // Use Assert.Inconclusive to document current status without failing 
the test
+    if (errors170.Count > 0)
+    {
+        Assert.Inconclusive($"Script currently fails with {errors170.Count} errors. Needs implementation.");
+    }
+    else
+    {
+        Assert.Inconclusive("Script already works! Can proceed to add comprehensive test coverage.");
+    }
+}
+```
+
+### Step 2: Build and Run the Debug Test
+
+
+```bash
+# 1. Build the parser to ensure it's up to date
+dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug
+
+# 2. Run the debug test to see current status
+dotnet test --filter "DebugExactScriptTest" Test/SqlDom/UTSqlScriptDom.csproj -c Debug
+
+# 3. Check the test output for detailed results
+# Look for the console output showing parsing results
+```
+
+### Interpret Results
+
+- **✅ SUCCESS**: Script works! You can skip to Step 6 to add comprehensive tests
+- **❌ FAILURE**: Script fails. Continue with Steps 1-5 to implement the missing functionality
+
+**Important**: Always test the **exact script provided** character-for-character, including:
+- Exact table/column names (e.g., `t.c1`, `t.c2`)
+- Exact function syntax (e.g., `JSON_OBJECTAGG( t.c1 : t.c2 )`)
+- Complete query context (FROM clause, subqueries, etc.)
+- Exact whitespace and formatting as provided
+
+**Remember**: Only add unit test methods to existing test files. Do not create separate SQL files, program files, or any other external files.
+
+## Step 1: Determine the SQL Server Version
+
+First, identify which SQL Server version introduced the syntax you want to test. 
+ +### SQL Server Version Mapping + +| SQL Server Version | Parser Version | Year | Common Name | +|-------------------|----------------|------|-------------| +| SQL Server 2000 | TSql80 | 2000 | SQL Server 2000 | +| SQL Server 2005 | TSql90 | 2005 | SQL Server 2005 | +| SQL Server 2008 | TSql100 | 2008 | SQL Server 2008 | +| SQL Server 2012 | TSql110 | 2012 | SQL Server 2012 | +| SQL Server 2014 | TSql120 | 2014 | SQL Server 2014 | +| SQL Server 2016 | TSql130 | 2016 | SQL Server 2016 | +| SQL Server 2017 | TSql140 | 2017 | SQL Server 2017 | +| SQL Server 2019 | TSql150 | 2019 | SQL Server 2019 | +| SQL Server 2022 | TSql160 | 2022 | SQL Server 2022 | +| SQL Server 2025 | TSql170 | 2025 | SQL Server 2025 | + +### How to Find the Version + +1. **Check Microsoft Documentation**: Look for "Applies to: SQL Server 20XX (XX.x)" +2. **Search Online**: Look for the feature announcement or blog posts +3. **Test in SSMS**: Connect to different SQL Server versions and try the syntax + +**Example**: +- `RESUMABLE = ON` for ALTER TABLE β†’ SQL Server 2022 β†’ **TSql160** +- `MAX_DURATION` for indexes β†’ SQL Server 2014 β†’ **TSql120** +- `VECTOR_SEARCH` function β†’ SQL Server 2025 β†’ **TSql170** + +## Step 2: Check if Syntax is Already Supported + +### Method 1: Search Test Scripts (Fastest) +```bash +# Search for the keyword in test scripts +grep -r "YOUR_KEYWORD" Test/SqlDom/TestScripts/ + +# Example: Check if RESUMABLE is tested for ALTER TABLE +grep -r "RESUMABLE" Test/SqlDom/TestScripts/*.sql + +# Search in specific version test files +grep -r "RESUMABLE" Test/SqlDom/TestScripts/*160.sql +``` + +### Method 2: Search Grammar Files +```bash +# Search in grammar files +grep -r "YOUR_KEYWORD" SqlScriptDom/Parser/TSql/*.g + +# Example: Check if RESUMABLE is in grammar +grep -r "Resumable" SqlScriptDom/Parser/TSql/TSql160.g +``` + +### Method 3: Search AST Definitions +```bash +# Search in AST XML +grep -r "YourFeatureName" SqlScriptDom/Parser/TSql/Ast.xml + +# Example: 
Check for VECTOR_SEARCH node +grep -r "VectorSearch" SqlScriptDom/Parser/TSql/Ast.xml +``` + +### Method 4: Try Parsing with Test Script +Create a unit test method to verify parsing: + +```csharp +// Add to appropriate test file (e.g., Test/SqlDom/Only170SyntaxTests.cs) +[TestMethod] +public void QuickTestExactSyntax() +{ + string script = @"ALTER TABLE t ADD CONSTRAINT pk PRIMARY KEY (id) WITH (RESUMABLE = ON);"; + + TSql170Parser parser = new TSql170Parser(true); + IList errors; + + using (StringReader reader = new StringReader(script)) + { + TSqlFragment fragment = parser.Parse(reader, out errors); + Console.WriteLine($"Errors: {errors.Count}"); + + // This will show you exactly which parser versions support the syntax + // and what error messages are generated if it fails + } + + Assert.Inconclusive($"Test completed with {errors.Count} errors"); +} +``` + +### Method 5: Test in Existing Test Framework +Add a temporary test method to verify quickly: + +```csharp +// Add to appropriate test file (e.g., Test/SqlDom/Only170SyntaxTests.cs) +[TestMethod] +public void TempTestExactScript() +{ + // Put ONLY your exact script here - do not create external files + string script = @"YOUR_EXACT_SCRIPT_HERE"; + + TSql170Parser parser = new TSql170Parser(true); + IList errors; + + using (StringReader reader = new StringReader(script)) + { + TSqlFragment fragment = parser.Parse(reader, out errors); + Console.WriteLine($"Parse result: {errors.Count} errors"); + foreach (var error in errors) + { + Console.WriteLine($"Error: {error.Message}"); + } + } + + Assert.Inconclusive("Temporary test - remove after verification"); +} +``` + +Then run the test: +```bash +dotnet test --filter "TempTestExactScript" -c Debug +``` + +Remember to remove this temporary test method after verification. + +## Step 3: Create a Test Script + +**CRITICAL**: Your test script MUST include the exact T-SQL statement provided. 
Don't modify, simplify, or generalize the syntax - test the precise statement given.
+
+### Test File Naming Convention
+Follow the pattern from testing.guidelines.instructions.md:
+- Format: `{FeatureName}Tests{Version}.sql`
+- Examples: `JsonFunctionTests160.sql`, `AlterTableResumableTests160.sql`
+- Location: `Test/SqlDom/TestScripts/`
+- Use version number corresponding to SQL Server version where feature was introduced
+
+### Test Script Requirements
+
+1. **Start with the exact script provided** - copy it exactly as given
+2. **Add comprehensive coverage** as described in testing guidelines:
+   - Basic syntax variations
+   - Function in different contexts (SELECT, WHERE, RETURN statements)
+   - Edge cases (empty parameters, NULL handling, subqueries)
+   - Integration contexts (variables, parameters, computed expressions)
+3. **Include context** - ensure the exact context (table aliases, subqueries) is tested
+4. **Test RETURN statements** - Critical for functions, always test in ALTER FUNCTION RETURN statements
+
+### Test Script Template
+
+Follow the comprehensive coverage pattern from testing guidelines:
+
+```sql
+-- Test 1: EXACT SCRIPT PROVIDED (REQUIRED - COPY EXACTLY)
+-- PUT THE EXACT T-SQL STATEMENT HERE WITHOUT ANY MODIFICATIONS
+-- Example: SELECT JSON_OBJECTAGG( t.c1 : t.c2 ) FROM (VALUES('key1', 'c'), ('key2', 'b'), ('key3','a')) AS t(c1, c2);
+
+-- Test 2: Basic function call (if applicable)
+SELECT YOUR_FUNCTION('param1', 'param2');
+
+-- Test 3: Function in different contexts
+SELECT col1, YOUR_FUNCTION('param') AS computed FROM table1;
+SELECT col1 FROM table1 WHERE YOUR_FUNCTION('param') > 0;
+
+-- Test 4: CRITICAL - Function in RETURN statements (for functions)
+ALTER FUNCTION TestYourFunction()
+RETURNS NVARCHAR(MAX)
+AS
+BEGIN
+    RETURN (YOUR_FUNCTION('test_value'));
+END;
+GO
+
+-- Test 5: With variables/parameters
+SELECT YOUR_FUNCTION(@variable);
+SELECT YOUR_FUNCTION(column_name);
+
+-- Test 6: Edge cases
+SELECT YOUR_FUNCTION(); -- Empty parameters (if valid)
+SELECT 
YOUR_FUNCTION(NULL, 'test', 123); -- NULL handling +SELECT YOUR_FUNCTION((SELECT nested FROM table)); -- Subqueries +``` + +### Real-World Example: ALTER TABLE RESUMABLE + +**File**: `Test/SqlDom/TestScripts/AlterTableResumableTests160.sql` + +```sql +-- Test 1: RESUMABLE with MAX_DURATION (minutes) +ALTER TABLE dbo.MyTable +ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) +WITH (RESUMABLE = ON, MAX_DURATION = 240 MINUTES); + +-- Test 2: RESUMABLE = ON +ALTER TABLE dbo.MyTable +ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) +WITH (RESUMABLE = ON); + +-- Test 3: RESUMABLE = OFF +ALTER TABLE dbo.MyTable +ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) +WITH (RESUMABLE = OFF); + +-- Test 4: UNIQUE constraint with RESUMABLE +ALTER TABLE dbo.MyTable +ADD CONSTRAINT uq_test UNIQUE NONCLUSTERED (name) +WITH (RESUMABLE = ON); +``` + +## Step 4: Configure Test Entry + +Add test configuration to the appropriate `OnlySyntaxTests.cs` file as described in testing guidelines. + +### Test Configuration File Location +- Format: `Only{Version}SyntaxTests.cs` +- Example: `Only160SyntaxTests.cs` (for SQL Server 2022) +- Location: `Test/SqlDom/` +- Add to the `Only{Version}TestInfos` array + +### Test Configuration Template + +Use the simplified approach from testing guidelines: + +```csharp +// Option 1: Simplified - only specify error counts you care about +new ParserTest{Version}("YourFeatureTests{Version}.sql"), // All previous versions default to null (ignored), current version expects 0 errors + +// Option 2: Specify only some previous version error counts +new ParserTest{Version}("YourFeatureTests{Version}.sql", nErrors80: 1, nErrors90: 1), // Only SQL 2000/2005 expect errors + +// Option 3: Full specification (legacy compatibility) +new ParserTest{Version}("YourFeatureTests{Version}.sql", + nErrors80: 1, // SQL Server 2000 - expect error for new syntax + nErrors90: 1, // SQL Server 2005 - expect error for new syntax + nErrors100: 1, // SQL Server 2008 - expect error for 
new syntax + nErrors110: 1, // SQL Server 2012 - expect error for new syntax + nErrors120: 1, // SQL Server 2014 - expect error for new syntax + nErrors130: 1, // SQL Server 2016 - expect error for new syntax + nErrors140: 1, // SQL Server 2017 - expect error for new syntax + nErrors150: 1 // SQL Server 2019 - expect error for new syntax + // nErrors{Version}: 0 is implicit for current version - expect success +), +``` + +### How to Determine Error Counts + +**Rule**: Count the number of SQL statements that will fail in each version. + +**Example**: If your test file has 4 statements with the new feature: +- Versions that DON'T support it: `nErrors = 4` (all 4 statements fail) +- Version that DOES support it: `nErrors = 0` (all 4 statements pass, implicit default) + +### Real-World Example: ALTER TABLE RESUMABLE + +**File**: `Test/SqlDom/Only160SyntaxTests.cs` + +```csharp +new ParserTest160("AlterTableResumableTests160.sql", + nErrors80: 4, // SQL Server 2000: RESUMABLE not supported (4 errors) + nErrors90: 4, // SQL Server 2005: RESUMABLE not supported (4 errors) + nErrors100: 4, // SQL Server 2008: RESUMABLE not supported (4 errors) + nErrors110: 4, // SQL Server 2012: RESUMABLE not supported (4 errors) + nErrors120: 4, // SQL Server 2014: RESUMABLE not supported (4 errors) + nErrors130: 4, // SQL Server 2016: RESUMABLE not supported (4 errors) + nErrors140: 4, // SQL Server 2017: RESUMABLE not supported (4 errors) + nErrors150: 4 // SQL Server 2019: RESUMABLE not supported (4 errors) + // nErrors160: 0 (implicit) - SQL Server 2022: RESUMABLE supported! (0 errors) +), +``` + +## Step 5: Create Baseline File + +Baseline files contain the expected formatted output after parsing and script generation. 
+
+### Baseline File Location
+- Format: `Baselines{Version}/YourTestFile{Version}.sql`
+- Example: `Baselines160/AlterTableResumableTests160.sql`
+- Location: `Test/SqlDom/`
+- **Critical**: Baseline filename MUST exactly match the test script filename
+
+### Baseline Generation Process
+
+Follow the testing guidelines process:
+
+#### Option A: Generate from Test Output (Recommended)
+1. **Create empty or placeholder baseline file first**
+2. **Run the test** (it will fail)
+3. **Copy "Actual" output** from test failure message
+4. **Paste into baseline file** with proper formatting
+
+```bash
+# 1. Create placeholder baseline file
+New-Item "Test/SqlDom/Baselines160/YourFeatureTests160.sql" -ItemType File
+
+# 2. Run the test (will fail initially)
+dotnet test --filter "YourFeatureTests160" -c Debug
+
+# 3. Copy the "Actual" output from test failure into baseline file
+# Look for the test failure message showing:
+# Expected: <baseline file content>
+# Actual: <generated parser output>
+
+# 4. Re-run the test (should pass now)
+dotnet test --filter "YourFeatureTests160" -c Debug
+```
+
+#### Option B: Manual Creation
+
+Create the baseline file with properly formatted SQL:
+
+```sql
+-- Baseline follows ScriptDOM formatting rules:
+-- - Keywords in UPPERCASE
+-- - Proper indentation
+-- - Line breaks at appropriate places
+
+ALTER TABLE dbo.MyTable
+    ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) WITH (RESUMABLE = ON, MAX_DURATION = 240 MINUTES);
+
+ALTER TABLE dbo.MyTable
+    ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) WITH (RESUMABLE = ON);
+
+ALTER TABLE dbo.MyTable
+    ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) WITH (RESUMABLE = OFF);
+
+ALTER TABLE dbo.MyTable
+    ADD CONSTRAINT uq_test UNIQUE NONCLUSTERED (name) WITH (RESUMABLE = ON);
+```
+
+### Real-World Example: ALTER TABLE RESUMABLE Baseline
+
+**File**: `Test/SqlDom/Baselines160/AlterTableResumableTests160.sql`
+
+```sql
+ALTER TABLE dbo.MyTable
+    ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) WITH (RESUMABLE = ON, MAX_DURATION = 240 MINUTES);
+
+ALTER TABLE 
dbo.MyTable + ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) WITH (RESUMABLE = ON); + +ALTER TABLE dbo.MyTable + ADD CONSTRAINT pk_test PRIMARY KEY CLUSTERED (id) WITH (RESUMABLE = OFF); + +ALTER TABLE dbo.MyTable + ADD CONSTRAINT uq_test UNIQUE NONCLUSTERED (name) WITH (RESUMABLE = ON); +``` + +## Step 6: Run and Validate Test + +Follow the testing guidelines validation process. + +### Build the Parser +```bash +# Build ScriptDOM library +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug + +# Build test project +dotnet build Test/SqlDom/UTSqlScriptDom.csproj -c Debug +``` + +### Run Your Specific Test +```bash +# Run specific test method +dotnet test Test/SqlDom/UTSqlScriptDom.csproj --filter "FullyQualifiedName~TSql160SyntaxIn160ParserTest" -c Debug + +# Run tests for specific version +dotnet test Test/SqlDom/UTSqlScriptDom.csproj --filter "TestCategory=TSql160" -c Debug + +# Run by test script name filter +dotnet test --filter "YourFeatureTests160" -c Debug + +# Run with verbose output to see details +dotnet test --filter "YourFeatureTests160" -c Debug -v detailed +``` + +### Run Full Test Suite (CRITICAL!) 
+```bash +# Always run ALL tests before committing +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug + +# Expected output: +# Test summary: total: 1116, failed: 0, succeeded: 1116, skipped: 0 +``` + +### Interpret Results + +Follow the testing guidelines interpretation: + +- βœ… **Success**: Generated output matches baseline, error counts match expectations +- ❌ **Failure**: Review actual vs expected output, adjust baseline or fix grammar +- ⚠️ **Baseline Mismatch**: Copy correct "Actual" output to baseline file +- ⚠️ **Error Count Mismatch**: Adjust error expectations in test configuration + +### Common Test Results + +βœ… **Success**: All tests pass, including your new test +``` +Test summary: total: 1116, failed: 0, succeeded: 1116, skipped: 0 +``` + +❌ **Baseline Mismatch**: Generated output doesn't match baseline +``` +Expected: +Actual: +``` +**Solution**: Copy the "Actual" output to your baseline file (note spacing differences) + +❌ **Error Count Mismatch**: Parse error count differs from expected +``` +TestYourFeature.sql: number of errors after parsing is different from expected. +Expected: 1, Actual: 0 +``` +**Solutions**: +- **If Actual < Expected**: Grammar now supports syntax in older versions β†’ Update error counts +- **If Actual > Expected**: Grammar has issues β†’ Fix grammar or adjust test + +❌ **Parse Errors**: Syntax not recognized +``` +SQL46010: Incorrect syntax near 'YOUR_TOKEN'. 
at offset 45, line 2, column 15 +``` +**Solutions**: Check grammar rules, verify syntactic predicates, see function guidelines for RETURN statement issues + +## Complete Example Workflow + +### Example: Testing ALTER TABLE RESUMABLE for SQL Server 2022 + +```bash +# Step 0: Test exact script first +echo "ALTER TABLE MyTable ADD CONSTRAINT pk PRIMARY KEY (id) WITH (RESUMABLE = ON);" > temp_test_script.sql +# Add debug test method to Only160SyntaxTests.cs and run to confirm current status + +# Step 1: Determine version +# Research shows: RESUMABLE for ALTER TABLE added in SQL Server 2022 β†’ TSql160 + +# Step 2: Check if already supported +grep -r "RESUMABLE" Test/SqlDom/TestScripts/*.sql +# Result: Found in ALTER INDEX tests, but not ALTER TABLE tests + +# Step 3: Create test script +New-Item "Test/SqlDom/TestScripts/AlterTableResumableTests160.sql" +# Add 4 test cases covering different scenarios + +# Step 4: Add test configuration +# Edit Test/SqlDom/Only160SyntaxTests.cs +# Add: new ParserTest160("AlterTableResumableTests160.sql", nErrors80: 4, ...) + +# Step 5: Create empty baseline +New-Item "Test/SqlDom/Baselines160/AlterTableResumableTests160.sql" + +# Step 6: Build and run test +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug +dotnet test --filter "AlterTableResumableTests160" -c Debug +# Test fails - copy "Actual" output into baseline file + +# Step 7: Re-run test +dotnet test --filter "AlterTableResumableTests160" -c Debug +# Test passes! + +# Step 8: Run full suite +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug +# All 1120 tests pass! + +# Step 9: Commit changes +git add Test/SqlDom/TestScripts/AlterTableResumableTests160.sql +git add Test/SqlDom/Baselines160/AlterTableResumableTests160.sql +git add Test/SqlDom/Only160SyntaxTests.cs +git commit -m "Add tests for ALTER TABLE RESUMABLE option (SQL Server 2022)" +``` + +## Testing Best Practices + +### 1. 
Comprehensive Coverage +- βœ… **TEST EXACT SCRIPT PROVIDED** (most critical) +- βœ… Test basic syntax variations +- βœ… Test with multiple options +- βœ… Test different statement variations +- βœ… Test with parameters/variables (if applicable) +- βœ… Test edge cases +- βœ… Test error conditions (if relevant) +- βœ… Test complete context (subqueries, table aliases, etc.) + +### 2. Baseline Accuracy +- βœ… Generate baseline from actual parser output +- βœ… Don't hand-edit baseline formatting +- βœ… Verify baseline matches ScriptDOM formatting conventions +- βœ… Check for proper indentation and line breaks + +### 3. Version-Specific Testing +- βœ… Test only in the version where feature was introduced +- βœ… Verify older versions properly reject the syntax +- βœ… Document version dependencies clearly + +### 4. Regression Prevention +- βœ… Always run full test suite before committing +- βœ… Investigate any unexpected test failures +- βœ… Don't assume your change is isolated + +## Common Pitfalls + +### ❌ Wrong Version Number +**Problem**: Testing in TSql150 when feature is TSql160-only +**Solution**: Verify SQL Server version in Microsoft docs + +### ❌ Incorrect Error Counts +**Problem**: `nErrors80: 2` but test has 4 failing statements +**Solution**: Count all statements that use the new feature + +### ❌ Hand-Edited Baselines +**Problem**: Baseline formatting doesn't match ScriptDOM output +**Solution**: Always copy from actual parser output + +### ❌ Skipping Full Test Suite +**Problem**: Your change breaks existing tests +**Solution**: Run `dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug` + +### ❌ Missing Test Cases +**Problem**: Feature works for basic case but fails with parameters +**Solution**: Add comprehensive test coverage + +### ❌ Not Testing Exact Script +**Problem**: Testing simplified/modified versions instead of the exact script provided +**Solution**: Always include the exact T-SQL statement as provided, character-for-character + +## 
Troubleshooting + +### Test Fails: "Syntax error near..." +**Diagnosis**: Parser doesn't recognize the syntax +**Solution**: Grammar needs to be updated (see [Bug Fixing Guide](../instructions/bug_fixing.guidelines.instructions.md)) + +### Test Fails: "Option 'X' is not valid..." +**Diagnosis**: Validation logic rejects the syntax +**Solution**: See [Validation Fix Guide](../instructions/Validation_fix.guidelines.instructions.md) + +### Test Fails: Baseline mismatch +**Diagnosis**: Generated output differs from baseline +**Solution**: Update baseline with actual output or fix generator + +### Full Suite Fails: Other tests break +**Diagnosis**: Your changes affected shared code +**Solution**: Review your changes, create context-specific rules + +## Quick Reference Commands + +```bash +# Step 0: Add debug unit test method first (NO external files) +# Add DebugExactScriptTest method to appropriate test file with exact script embedded + +# Search for syntax in tests +grep -r "KEYWORD" Test/SqlDom/TestScripts/ + +# Search in grammar +grep -r "KEYWORD" SqlScriptDom/Parser/TSql/*.g + +# Build parser +dotnet build SqlScriptDom/Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug + +# Run specific test +dotnet test --filter "TestName" -c Debug + +# Run full suite +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug + +# Create test files (only for comprehensive testing, not initial verification) +New-Item "Test/SqlDom/TestScripts/MyTest160.sql" +New-Item "Test/SqlDom/Baselines160/MyTest160.sql" +``` + +## Related Guides + +- [debugging_workflow.guidelines.instructions.md](../instructions/debugging_workflow.guidelines.instructions.md) - How to diagnose issues +- [Validation_fix.guidelines.instructions.md](../instructions/Validation_fix.guidelines.instructions.md) - Fix validation errors +- [Bug Fixing Guide](../instructions/bug_fixing.guidelines.instructions.md) - Add new grammar rules +- [copilot-instructions.md](../copilot-instructions.md) - Main project 
documentation diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..3137cb9 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,13 @@ +# Description + +*Please provide a detailed description. Be as descriptive as possible - include information about what is being changed, +why it's being changed, and any links to relevant issues. If this is closing an existing issue use one of the [issue linking keywords](https://docs.github.com/issues/tracking-your-work-with-issues/using-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) to link the issue to this PR and have it automatically close when completed.* + +In addition, go through the checklist below and check each item as you validate it is either handled or not applicable to this change. + +# Code Changes + +- [ ] [Unit tests](https://github.com/microsoft/SqlScriptDOM/tree/main/Test) are added, if possible +- [ ] Existing [tests are passing](https://github.com/microsoft/SqlScriptDOM/blob/main/CONTRIBUTING.md#running-the-tests) +- [ ] New or updated code follows the guidelines [here](https://github.com/microsoft/SqlScriptDOM/blob/main/CONTRIBUTING.md#helpful-notes-for-sqldom-extensions) + diff --git a/.github/workflows/pr-validation.yml b/.github/workflows/pr-validation.yml index 892a474..9e3f2d7 100644 --- a/.github/workflows/pr-validation.yml +++ b/.github/workflows/pr-validation.yml @@ -18,6 +18,12 @@ jobs: run: dotnet restore - name: Build run: dotnet build dirs.proj + - name: Archive antlr log for troubleshooting + if: ${{ failure() && matrix.os == 'ubuntu-latest' }} + uses: actions/upload-artifact@v4 + with: + name: antlr-log + path: SqlScriptDom/NUL test: runs-on: ${{ matrix.os }} strategy: @@ -26,6 +32,9 @@ jobs: needs: build steps: - uses: actions/checkout@v2 + - uses: actions/setup-dotnet@v2 + with: + global-json-file: global.json - name: Disable strong name validation if: matrix.os == 
'windows-latest' shell: pwsh diff --git a/.gitignore b/.gitignore index c606ba9..997c172 100644 --- a/.gitignore +++ b/.gitignore @@ -356,4 +356,7 @@ MigrationBackup/ out/ # Project packages folder -.packages/ \ No newline at end of file +.packages/ + +# Temporary build artifacts +tmp/ \ No newline at end of file diff --git a/.vscode/mcp.json b/.vscode/mcp.json new file mode 100644 index 0000000..a49772b --- /dev/null +++ b/.vscode/mcp.json @@ -0,0 +1,25 @@ +{ + "servers": { + "ado": { + "type": "stdio", + "command": "npx", + "args": ["-y", "@azure-devops/mcp", "msdata"], + "env": { + "ADO_DEFAULT_PROJECT": "SQLToolsAndLibraries", + "ADO_DEFAULT_REPO": "ScriptDOM", + "ADO_DEFAULT_BRANCH": "main", + "ADO_DEFAULT_AREA_PATH": "SQLToolsAndLibraries\\DacFx" + } + }, + "my-mcp-mini-drivers": { + "url": "https://mcp.bluebird-ai.net", + "type": "http", + "headers": { + "x-mcp-ec-organization": "msdata", + "x-mcp-ec-project": "SQLToolsAndLibraries", + "x-mcp-ec-repository": "ScriptDOM", + "x-mcp-ec-branch": "main" + } + } + } +} \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index dbdf428..cc61129 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -106,34 +106,70 @@ Example: To run all priority 0 tests dotnet test --filter Priority=0 ``` +#### ⚠️ CRITICAL: Full Test Suite for Parser Changes + +**If you make ANY changes to grammar files (`.g` files) or AST definitions (`Ast.xml`), you MUST run the complete test suite** to ensure no regressions: + +```cmd +dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug +``` + +**Why this is critical for parser changes:** +- Grammar changes can have far-reaching effects on seemingly unrelated functionality +- Shared grammar rules are used in multiple contexts throughout the parser +- AST modifications can affect script generation and visitor patterns across the entire codebase +- Token recognition changes can impact parsing of statements that don't even use the modified feature + +**Example of unexpected 
failures:** +- Modifying a shared rule like `identifierColumnReferenceExpression` can cause other tests to fail because the rule now accepts syntax that should be rejected in different contexts +- Changes to operator precedence can affect unrelated expressions +- Adding new AST members without proper script generation support can break round-trip parsing + +Always verify that all ~557 tests pass before submitting your changes. + ### Pull Request Process Before sending a Pull Request, please do the following: -1. Ensure builds are still successful and tests, including any added or updated tests, pass prior to submitting the pull request. -2. Update any documentation, user and contributor, that is impacted by your changes. -3. Include your change description in `CHANGELOG.md` file as part of pull request. -4. You may merge the pull request in once you have the sign-off of two other developers, or if you do not have permission to do that, you may request the second reviewer to merge it for you. +1. **For parser changes (grammar/AST modifications): Run the complete test suite** (`dotnet test Test/SqlDom/UTSqlScriptDom.csproj -c Debug`) and ensure all ~557 tests pass. Grammar changes can have unexpected side effects. +2. Ensure builds are still successful and tests, including any added or updated tests, pass prior to submitting the pull request. +3. Update any documentation, user and contributor, that is impacted by your changes. +4. Include your change description in `CHANGELOG.md` file as part of pull request. +5. You may merge the pull request in once you have the sign-off of two other developers, or if you do not have permission to do that, you may request the second reviewer to merge it for you. ### Helpful notes for SQLDOM extensions -1. For changing the DOM classes, modify the XML file (the C# code is generated based on this during the build process) `$(EnlistmentRoot)\Source\SqlDom\SqlScriptDom\Parser\TSql\Ast.xml`. 
Change Ast.xml to put the class pieces on their appropriate statements. - 1. The build process is defined in `$(EnlistmentRoot)\Source\SqlDom\SqlScriptDom\SqlScriptDom.props` (Target Name="CreateAST") - 2. The generated files are dropped in `$(EnlistmentRoot)\obj\\\SqlScriptDom.csproj\` +1. For changing the DOM classes, modify the XML file (the C# code is generated based on this during the build process) `SqlScriptDom\Parser\TSql\Ast.xml`. Change Ast.xml to put the class pieces on their appropriate statements. + 1. The build process is defined in `SqlScriptDom\GenerateFiles.props` (Target Name="CreateAST") + 2. The generated files are dropped in `obj\SqlScriptDom\AnyCPU\\\Microsoft.SqlServer.TransactSql.ScriptDom.csproj\` + + Regenerating generated sources (what to run and when) + --------------------------------------------------- + When you modify `SqlScriptDom\Parser\TSql\Ast.xml` or any `TSql<#>.g` grammar file, the C# parser and DOM sources are produced by MSBuild generation targets (for example `CreateAST`). These targets are invoked automatically during a normal build, so in most cases you can simply run: + + ```powershell + dotnet build SqlScriptDom\Microsoft.SqlServer.TransactSql.ScriptDom.csproj -c Debug + ``` + + If you only want to run generation targets (no compile) or need more detailed generation logs, invoke the MSBuild targets directly (note the quotes: in PowerShell an unquoted `;` would split the command, so the target list must be passed as a single quoted argument): + + ```powershell + dotnet msbuild SqlScriptDom\Microsoft.SqlServer.TransactSql.ScriptDom.csproj -t:"GLexerParserCompile;GSqlTokenTypesCompile;CreateAST" -p:Configuration=Debug + ``` + + Generated files are written into the `obj` folder for that project (for example `obj\SqlScriptDom\AnyCPU\\\Microsoft.SqlServer.TransactSql.ScriptDom.csproj\`). If antlr or related tools are missing, see `Directory.Build.props` for `AntlrLocation` and follow the repo guidance to supply the binaries. 2. 
For changing the parser, modify the .g file here: -`$(EnlistmentRoot)\Source\SqlDom\SqlScriptDom\Parser\TSql\TSql<#>.g` where # is the version (ie - 100, 120, 130). This will usually be the latest number if adding new grammar. Change the Tsql(xxx).g file to parse the new syntax. - 1. The build process is defined in `$(EnlistmentRoot)\Source\SqlDom\SqlScriptDom\SqlScriptDom.props` (Target Name="CreateAST") - 2. The generated files are dropped in `$(EnlistmentRoot)\obj\x86|x64\Debug|Release\sqlscriptdom.csproj\` +`SqlScriptDom\Parser\TSql\TSql<#>.g` where # is the version (ie - 100, 120, 130). This will usually be the latest number if adding new grammar. Change the Tsql(xxx).g file to parse the new syntax. + 1. The build process is defined in `SqlScriptDom\GenerateFiles.props` (Target Name="CreateAST") + 2. The generated files are dropped in `obj\SqlScriptDom\AnyCPU\\\Microsoft.SqlServer.TransactSql.ScriptDom.csproj\` -3. For changing the ScriptGenerator, modify the appropriate file (i.e. Visitor that accepts the modified DOM class) in here: `$(EnlistmentRoot)\Source\SqlDom\SqlScriptDom\ScriptDom\SqlServer\ScriptGenerator`. - 1. To add a new ScriptGenerator, you need to add the file to `$(EnlistmentRoot)\Source\SqlDom\SqlScriptDom\SqlScriptDom.props` +3. For changing the ScriptGenerator, modify the appropriate file (i.e. Visitor that accepts the modified DOM class) in here: `SqlScriptDom\ScriptDom\SqlServer\ScriptGenerator`. 1. Change The visitors SqlScriptGenerator.X to use the new piece from AST.XML 1. If you're adding syntax that's Azure-only or Standalone-only, implement appropriate Versioning Visitor for your constructs. -4. When adding/removing new files please add/remove an entry to/from `$(EnlistmentRoot)\Source\SqlDom\SqlScriptDom\SqlScriptDom.csproj` -5. To extend the tests do the following: +4. To extend the tests do the following: 1. Baselines# needs to be updated or added with the appropriate .sql file as expected results. 1. 
The Only#SyntaxTests.cs needs to be extended or added to specify the appropriate TestScripts script. 1. Positive tests go in Only#SyntaxTests.cs if adding new grammar. diff --git a/Directory.Build.props b/Directory.Build.props index 4b5babd..d72f0a6 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -26,10 +26,12 @@ - net462 + net472 + netstandard2.1 + netstandard2.0 net8.0 - $(NetfxVersion);$(NetCoreVersion) + $(NetStandardVersion);$(NetfxVersion);$(NetCoreVersion);$(NetStandardVersion20) false Microsoft SQL Script DOM diff --git a/README.md b/README.md index 867bd92..8029697 100644 --- a/README.md +++ b/README.md @@ -14,11 +14,11 @@ SQL Script DOM is a .NET library that provides formatting and parsing capabiliti Script DOM is used by DacFX and as an standalone library for client applications. ## Resources: -- πŸ“¦ NuGet package: https://www.nuget.org/packages/Microsoft.SqlServer.TransactSql.ScriptDom -- πŸ“š API docs: https://learn.microsoft.com/dotnet/api/microsoft.sqlserver.transactsql.scriptdom -- πŸ› οΈ Contributor's guide: [CONTRIBUTING.md](./CONTRIBUTING.md) -- πŸ’¬ Discussions: [DacFx discussions](https://github.com/microsoft/DacFx/discussions) - +- πŸ“¦ [NuGet package](https://www.nuget.org/packages/Microsoft.SqlServer.TransactSql.ScriptDom) +- πŸ“š [API docs](https://learn.microsoft.com/dotnet/api/microsoft.sqlserver.transactsql.scriptdom) +- πŸ› οΈ [Contributor's guide](./CONTRIBUTING.md) +- πŸ’¬ [Discussions](https://github.com/microsoft/DacFx/discussions) +- πŸ›« [Release notes](./release-notes) ## Built With * https://www.antlr.org/ diff --git a/SqlScriptDom/GenerateFiles.props b/SqlScriptDom/GenerateFiles.props index 1571a61..b94a61f 100644 --- a/SqlScriptDom/GenerateFiles.props +++ b/SqlScriptDom/GenerateFiles.props @@ -11,6 +11,7 @@ + diff --git a/SqlScriptDom/Parser/TSql/Ast.xml b/SqlScriptDom/Parser/TSql/Ast.xml index 24ef837..d17d34e 100644 --- a/SqlScriptDom/Parser/TSql/Ast.xml +++ b/SqlScriptDom/Parser/TSql/Ast.xml @@ -236,6 
+236,7 @@ + @@ -270,6 +271,11 @@ Summary="The xml data type option."/> + + + + + @@ -349,6 +355,11 @@ + + + + + @@ -403,8 +414,8 @@ Summary="Actual JSON for clause options. First one is always present (JSON mode)."/> - - + + @@ -641,7 +652,10 @@ + + + @@ -1346,6 +1360,27 @@ + + + + + + + + + + + + + + + + + + + + + @@ -1459,6 +1494,10 @@ + + + + @@ -2400,6 +2439,7 @@ + @@ -2486,6 +2526,10 @@ + + + + @@ -2529,6 +2573,10 @@ + + + + @@ -2568,6 +2616,9 @@ + + + @@ -2670,6 +2721,9 @@ + + + @@ -2702,6 +2756,11 @@ + + + + + @@ -3538,7 +3597,7 @@ - + @@ -4580,6 +4639,20 @@ + + + + + + + + + + + + + + @@ -4612,6 +4685,14 @@ + + + + + + + + @@ -4671,4 +4752,61 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/SqlScriptDom/Parser/TSql/BooleanComparisonType.cs b/SqlScriptDom/Parser/TSql/BooleanComparisonType.cs index ce9d15b..adb2630 100644 --- a/SqlScriptDom/Parser/TSql/BooleanComparisonType.cs +++ b/SqlScriptDom/Parser/TSql/BooleanComparisonType.cs @@ -66,5 +66,10 @@ public enum BooleanComparisonType /// The distinct predicate, IS NOT DISTINCT FROM. /// IsNotDistinctFrom = 12, + + /// + /// The NOT LIKE predicate + /// + NotLike = 13, } } diff --git a/SqlScriptDom/Parser/TSql/CodeGenerationSupporter.cs b/SqlScriptDom/Parser/TSql/CodeGenerationSupporter.cs index 918e01c..4f7e5be 100644 --- a/SqlScriptDom/Parser/TSql/CodeGenerationSupporter.cs +++ b/SqlScriptDom/Parser/TSql/CodeGenerationSupporter.cs @@ -1,4 +1,4 @@ -//------------------------------------------------------------------------------ +ο»Ώ//------------------------------------------------------------------------------ // // Copyright (c) Microsoft Corporation. All rights reserved. 
// @@ -99,6 +99,15 @@ internal static class CodeGenerationSupporter internal const string Affinity = "AFFINITY"; internal const string After = "AFTER"; internal const string Aggregate = "AGGREGATE"; + internal const string AiGenerateChunks = "AI_GENERATE_CHUNKS"; + internal const string AIGenerateEmbeddings = "AI_GENERATE_EMBEDDINGS"; + internal const string AIAnalyzeSentiment = "AI_ANALYZE_SENTIMENT"; + internal const string AIClassify = "AI_CLASSIFY"; + internal const string AIExtract = "AI_EXTRACT"; + internal const string AIFixGrammar = "AI_FIX_GRAMMAR"; + internal const string AIGenerateResponse = "AI_GENERATE_RESPONSE"; + internal const string AISummarize = "AI_SUMMARIZE"; + internal const string AITranslate = "AI_TRANSLATE"; internal const string Algorithm = "ALGORITHM"; internal const string AlterColumn = "ALTERCOLUMN"; internal const string All = "ALL"; @@ -118,6 +127,7 @@ internal static class CodeGenerationSupporter internal const string Always = "ALWAYS"; internal const string Anonymous = "ANONYMOUS"; internal const string AnsiNullDefault = "ANSI_NULL_DEFAULT"; + internal const string ApiFormat = "API_FORMAT"; internal const string Application = "APPLICATION"; internal const string ApplicationLog = "APPLICATION_LOG"; internal const string Apply = "APPLY"; @@ -198,6 +208,8 @@ internal static class CodeGenerationSupporter internal const string CheckPolicy = "CHECK_POLICY"; internal const string Checksum = "CHECKSUM"; internal const string ChecksumAgg = "CHECKSUM_AGG"; + internal const string ChunkSize = "CHUNK_SIZE"; + internal const string ChunkType = "CHUNK_TYPE"; internal const string ModularSum = "MODULAR_SUM"; internal const string Classifier = "CLASSIFIER"; internal const string Classification = "CLASSIFICATION"; @@ -208,6 +220,7 @@ internal static class CodeGenerationSupporter internal const string Cluster = "CLUSTER"; internal const string Clustered = "CLUSTERED"; internal const string ClearPort = "CLEAR_PORT"; + internal const string Clone = 
"CLONE"; internal const string CodePage = "CODEPAGE"; internal const string Collection = "COLLECTION"; internal const string Column = "COLUMN"; @@ -242,6 +255,7 @@ internal static class CodeGenerationSupporter internal const string CopyCommand = "COPY"; internal const string CopyOnly = "COPY_ONLY"; internal const string Correlated = "CORRELATED"; + internal const string Cosine = "COSINE"; internal const string Count = "COUNT"; internal const string CountBig = "COUNT_BIG"; internal const string Counter = "COUNTER"; @@ -298,6 +312,7 @@ internal static class CodeGenerationSupporter internal const string Dependents = "DEPENDENTS"; internal const string Description = "DESCRIPTION"; internal const string DesiredState = "DESIRED_STATE"; + internal const string DiskANN = "DISKANN"; internal const string Delay = "DELAY"; internal const string DelayedDurability = "DELAYED_DURABILITY"; internal const string DelimitedText = "DELIMITEDTEXT"; @@ -316,6 +331,7 @@ internal static class CodeGenerationSupporter internal const string Document = "DOCUMENT"; internal const string DollarSign = "$"; internal const string DollarPartition = "$PARTITION"; + internal const string Dot = "DOT"; internal const string Drop = "DROP"; internal const string DropExisting = "DROP_EXISTING"; internal const string DTSBuffers = "DTS_BUFFERS"; @@ -325,11 +341,13 @@ internal static class CodeGenerationSupporter internal const string Edition = "EDITION"; internal const string ElasticPool = "ELASTIC_POOL"; internal const string Elements = "ELEMENTS"; + internal const string Embeddings = "EMBEDDINGS"; internal const string Emergency = "EMERGENCY"; internal const string Empty = "EMPTY"; internal const string Enable = "ENABLE"; internal const string Enabled = "ENABLED"; internal const string EnableBroker = "ENABLE_BROKER"; + internal const string EnableChunkSetId = "ENABLE_CHUNK_SET_ID"; internal const string EnclaveComputations = "ENCLAVE_COMPUTATIONS"; internal const string Encoding = "ENCODING"; internal 
const string Encrypted = "ENCRYPTED"; @@ -352,6 +370,7 @@ internal static class CodeGenerationSupporter internal const string ErrorFileCredential = "ERRORFILE_CREDENTIAL"; internal const string EscapeChar = "ESCAPECHAR"; internal const string EstimateOnly = "ESTIMATEONLY"; + internal const string Euclidean = "EUCLIDEAN"; internal const string Event = "EVENT"; internal const string EventRetentionMode = "EVENT_RETENTION_MODE"; internal const string Exclamation = "!"; @@ -410,10 +429,13 @@ internal static class CodeGenerationSupporter internal const string FieldQuote = "FIELDQUOTE"; internal const string FipsFlagger = "FIPS_FLAGGER"; internal const string First = "FIRST"; + internal const string Fixed = "FIXED"; internal const string FlushIntervalSeconds = "FLUSH_INTERVAL_SECONDS"; internal const string FlushIntervalSecondsAlt = "DATA_FLUSH_INTERVAL_SECONDS"; internal const string Fn = "FN"; internal const string Float = "FLOAT"; + internal const string Float16 = "FLOAT16"; + internal const string Float32 = "FLOAT32"; internal const string For = "FOR"; internal const string ForceFailoverAllowDataLoss = "FORCE_FAILOVER_ALLOW_DATA_LOSS"; internal const string ForceScan = "FORCESCAN"; @@ -509,6 +531,12 @@ internal static class CodeGenerationSupporter internal const string Json = "JSON"; internal const string JsonArray = "JSON_ARRAY"; internal const string JsonObject = "JSON_OBJECT"; + internal const string JsonObjectAgg = "JSON_OBJECTAGG"; + internal const string JsonArrayAgg = "JSON_ARRAYAGG"; + internal const string JsonQuery = "JSON_QUERY"; + internal const string JsonValue = "JSON_VALUE"; + internal const string Array = "ARRAY"; + internal const string Wrapper = "WRAPPER"; internal const string Keep = "KEEP"; internal const string KeepDefaults = "KEEPDEFAULTS"; internal const string KeepFixed = "KEEPFIXED"; @@ -541,6 +569,7 @@ internal static class CodeGenerationSupporter internal const string Level3 = "LEVEL_3"; internal const string Level4 = "LEVEL_4"; internal 
const string Library = "LIBRARY"; + internal const string Like = "LIKE"; internal const string LifeTime = "LIFETIME"; internal const string Linux = "LINUX"; internal const string List = "LIST"; @@ -550,6 +579,7 @@ internal static class CodeGenerationSupporter internal const string LoadHistory = "LOADHISTORY"; internal const string LobCompaction = "LOB_COMPACTION"; internal const string Local = "LOCAL"; + internal const string LocalRuntimePath = "LOCAL_RUNTIME_PATH"; internal const string Location = "LOCATION"; internal const string LocationUserDB = "USER_DB"; internal const string LocalServiceName = "LOCAL_SERVICE_NAME"; @@ -563,6 +593,7 @@ internal static class CodeGenerationSupporter internal const string LSquareParen = "["; internal const string MaintainIndex = "MAINTAIN_INDEX"; internal const string Manual = "MANUAL"; + internal const string ManualCutover = "MANUAL_CUTOVER"; internal const string Mark = "MARK"; internal const string MarkInUseForRemoval = "MARK_IN_USE_FOR_REMOVAL"; internal const string Masked = "MASKED"; @@ -609,6 +640,7 @@ internal static class CodeGenerationSupporter internal const string Message = "MESSAGE"; internal const string MessageForwarding = "MESSAGE_FORWARDING"; internal const string MessageForwardSize = "MESSAGE_FORWARD_SIZE"; + internal const string Metric = "METRIC"; internal const string MigrationState = "MIGRATION_STATE"; internal const string Min = "MIN"; internal const string MinGrantPercent = "MIN_GRANT_PERCENT"; @@ -621,6 +653,8 @@ internal static class CodeGenerationSupporter internal const string Mirror = "MIRROR"; internal const string Mixed = "MIXED"; internal const string MixedPageAllocation = "MIXED_PAGE_ALLOCATION"; + internal const string ModelType = "MODEL_TYPE"; + internal const string ModelName = "MODEL"; internal const string Modify = "MODIFY"; internal const string Money = "MONEY"; internal const string Move = "MOVE"; @@ -694,6 +728,8 @@ internal static class CodeGenerationSupporter internal const string 
OperatorAudit = "OPERATOR_AUDIT"; internal const string Optimistic = "OPTIMISTIC"; internal const string Optimize = "OPTIMIZE"; + internal const string OptimizedLocking = "OPTIMIZED_LOCKING"; + internal const string OptimizeForArraySearch = "OPTIMIZE_FOR_ARRAY_SEARCH"; internal const string OptimizeForSequentialKey = "OPTIMIZE_FOR_SEQUENTIAL_KEY"; internal const string OptimizerQueue = "OPTIMIZER_QUEUE"; internal const string Order = "ORDER"; @@ -727,6 +763,7 @@ internal static class CodeGenerationSupporter internal const string Paused = "PAUSED"; internal const string Percentage = "PERCENTAGE"; internal const string PerCpu = "PER_CPU"; + internal const string PerformCutover = "PERFORM_CUTOVER"; internal const string Period = "PERIOD"; internal const string PermissionSet = "PERMISSION_SET"; internal const string PerNode = "PER_NODE"; @@ -753,6 +790,7 @@ internal static class CodeGenerationSupporter internal const string Model = "MODEL"; internal const string RunTime = "RUNTIME"; internal const string Onnx = "ONNX"; + internal const string Overlap = "OVERLAP"; internal const string Process = "PROCESS"; internal const string PropertySetGuid = "PROPERTY_SET_GUID"; @@ -798,6 +836,9 @@ internal static class CodeGenerationSupporter internal const string RecursiveTriggers = "RECURSIVE_TRIGGERS"; internal const string Recovery = "RECOVERY"; internal const string Regenerate = "REGENERATE"; + internal const string RegexpLike = "REGEXP_LIKE"; + internal const string RegexpMatches = "REGEXP_MATCHES"; + internal const string RegexpSplitToTable = "REGEXP_SPLIT_TO_TABLE"; internal const string RejectType = "REJECT_TYPE"; internal const string RejectSampleValue = "REJECT_SAMPLE_VALUE"; internal const string RejectValue = "REJECT_VALUE"; @@ -832,6 +873,7 @@ internal static class CodeGenerationSupporter internal const string RetentionDays = "RETENTION_DAYS"; internal const string RetentionPeriod = "RETENTION_PERIOD"; internal const string Returns = "RETURNS"; + internal const string 
Returning = "RETURNING"; internal const string RequestMaxCpuTimeSec = "REQUEST_MAX_CPU_TIME_SEC"; internal const string RequestMaxMemoryGrantPercent = "REQUEST_MAX_MEMORY_GRANT_PERCENT"; internal const string RequestMemoryGrantTimeoutSec = "REQUEST_MEMORY_GRANT_TIMEOUT_SEC"; @@ -906,6 +948,7 @@ internal static class CodeGenerationSupporter internal const string ShrinkDb = "SHRINKDB"; internal const string Sid = "SID"; internal const string Signature = "SIGNATURE"; + internal const string SimilarTo = "SIMILAR_TO"; internal const string Simple = "SIMPLE"; internal const string SingleBlob = "SINGLE_BLOB"; internal const string SingleClob = "SINGLE_CLOB"; @@ -1015,6 +1058,7 @@ internal static class CodeGenerationSupporter internal const string Timer = "TIMER"; internal const string TimeStamp = "TIMESTAMP"; internal const string TinyInt = "TINYINT"; + internal const string TopN = "TOP_N"; internal const string TornPageDetection = "TORN_PAGE_DETECTION"; internal const string TrackCausality = "TRACK_CAUSALITY"; internal const string TrackColumnsUpdated = "TRACK_COLUMNS_UPDATED"; @@ -1049,6 +1093,7 @@ internal static class CodeGenerationSupporter internal const string Unpivot = "UNPIVOT"; internal const string UpdLock = "UPDLOCK"; internal const string Url = "URL"; + internal const string Use = "USE"; internal const string Used = "USED"; internal const string UseIdentity = "USE_IDENTITY"; internal const string UseTypeDefault = "USE_TYPE_DEFAULT"; @@ -1066,6 +1111,7 @@ internal static class CodeGenerationSupporter internal const string Varp = "VARP"; internal const string VDevNo = "VDEVNO"; internal const string Vector = "Vector"; + internal const string VectorSearch = "VECTOR_SEARCH"; internal const string Verbose = "VERBOSE"; internal const string VerboseLogging = "VerboseLogging"; internal const string VerifyOnly = "VERIFYONLY"; @@ -1076,6 +1122,7 @@ internal static class CodeGenerationSupporter internal const string VirtualDevice = "VIRTUAL_DEVICE"; internal const 
string VStart = "VSTART"; internal const string WaitAtLowPriority = "WAIT_AT_LOW_PRIORITY"; + internal const string WaitStatsCaptureMode = "WAIT_STATS_CAPTURE_MODE"; internal const string WebMethod = "WEBMETHOD"; internal const string WellFormedXml = "WELL_FORMED_XML"; internal const string WideChar = "WIDECHAR"; @@ -1263,7 +1310,7 @@ internal static class CodeGenerationSupporter internal const string Russian = "RUSSIAN"; internal const string Romanian = "ROMANIAN"; internal const string Brazilian = "BRAZILIAN"; - internal const string NorwegianBokmal = "NORWEGIAN (BOKMΕL)"; + internal const string NorwegianBokmal = "NORWEGIAN (BOKMΓ…L)"; internal const string Dutch = "DUTCH"; internal const string Korean = "KOREAN"; internal const string Japanese = "JAPANESE"; diff --git a/SqlScriptDom/Parser/TSql/DatabaseOptionKind.cs b/SqlScriptDom/Parser/TSql/DatabaseOptionKind.cs index 2cdeec3..84d3832 100644 --- a/SqlScriptDom/Parser/TSql/DatabaseOptionKind.cs +++ b/SqlScriptDom/Parser/TSql/DatabaseOptionKind.cs @@ -95,7 +95,11 @@ public enum DatabaseOptionKind // T-SQL 150 On/Off options DataRetention = 67, - Ledger = 68 + Ledger = 68, + + ManualCutover = 69, + PerformCutover = 70, + OptimizedLocking = 71 } #pragma warning restore 1591 diff --git a/SqlScriptDom/Parser/TSql/DatabaseOptionKindHelper.cs b/SqlScriptDom/Parser/TSql/DatabaseOptionKindHelper.cs index 612f3b0..c46471b 100644 --- a/SqlScriptDom/Parser/TSql/DatabaseOptionKindHelper.cs +++ b/SqlScriptDom/Parser/TSql/DatabaseOptionKindHelper.cs @@ -32,6 +32,10 @@ private DatabaseOptionKindHelper() // 140 Options AddOptionMapping(DatabaseOptionKind.AutomaticTuning, CodeGenerationSupporter.AutomaticTuning, SqlVersionFlags.TSql140AndAbove); + + // 170 Options + AddOptionMapping(DatabaseOptionKind.ManualCutover, CodeGenerationSupporter.ManualCutover, SqlVersionFlags.TSql170AndAbove); + AddOptionMapping(DatabaseOptionKind.PerformCutover, CodeGenerationSupporter.PerformCutover, SqlVersionFlags.TSql170AndAbove); } internal 
static readonly DatabaseOptionKindHelper Instance = new DatabaseOptionKindHelper(); diff --git a/SqlScriptDom/Parser/TSql/EventNotificationEventType.cs b/SqlScriptDom/Parser/TSql/EventNotificationEventType.cs index 6fa1406..221ef2e 100644 --- a/SqlScriptDom/Parser/TSql/EventNotificationEventType.cs +++ b/SqlScriptDom/Parser/TSql/EventNotificationEventType.cs @@ -1091,6 +1091,26 @@ public enum EventNotificationEventType /// DropExternalLanguage = 331, + /// + /// CREATE_JSON_INDEX + /// + CreateJsonIndex = 343, + + /// + /// CREATE_VECTOR_INDEX + /// + CreateVectorIndex = 344, + + /// + /// ADD_INFORMATION_PROTECTION + /// + AddInformationProtection = 345, + + /// + /// DROP_INFORMATION_PROTECTION + /// + DropInformationProtection = 346, + /// /// AUDIT_LOGIN. /// diff --git a/SqlScriptDom/Parser/TSql/ExternalModelTypeOption.cs b/SqlScriptDom/Parser/TSql/ExternalModelTypeOption.cs new file mode 100644 index 0000000..858784e --- /dev/null +++ b/SqlScriptDom/Parser/TSql/ExternalModelTypeOption.cs @@ -0,0 +1,26 @@ +ο»Ώ//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// +//------------------------------------------------------------------------------ +using System; + +namespace Microsoft.SqlServer.TransactSql.ScriptDom +{ +#pragma warning disable 1591 + + /// + /// The enumeration specifies the external model type + /// Currently, we support EMBEDDINGS only. 
+ /// + public enum ExternalModelTypeOption + { + /// + /// MODEL_TYPE = EMBEDDINGS + /// + EMBEDDINGS = 0, + + } + +#pragma warning restore 1591 +} diff --git a/SqlScriptDom/Parser/TSql/IndexOptionHelper.cs b/SqlScriptDom/Parser/TSql/IndexOptionHelper.cs index d552933..4a739a1 100644 --- a/SqlScriptDom/Parser/TSql/IndexOptionHelper.cs +++ b/SqlScriptDom/Parser/TSql/IndexOptionHelper.cs @@ -43,6 +43,11 @@ private IndexOptionHelper() AddOptionMapping(IndexOptionKind.XmlCompression, CodeGenerationSupporter.XmlCompression, SqlVersionFlags.TSql160AndAbove); + AddOptionMapping(IndexOptionKind.VectorMetric, CodeGenerationSupporter.Metric, SqlVersionFlags.TSql170AndAbove); + AddOptionMapping(IndexOptionKind.VectorType, CodeGenerationSupporter.Type, SqlVersionFlags.TSql170AndAbove); + + AddOptionMapping(IndexOptionKind.OptimizeForArraySearch, CodeGenerationSupporter.OptimizeForArraySearch, SqlVersionFlags.TSql170AndAbove); + } internal static readonly IndexOptionHelper Instance = new IndexOptionHelper(); diff --git a/SqlScriptDom/Parser/TSql/IndexOptionKind.cs b/SqlScriptDom/Parser/TSql/IndexOptionKind.cs index 795fa3f..ae69cb5 100644 --- a/SqlScriptDom/Parser/TSql/IndexOptionKind.cs +++ b/SqlScriptDom/Parser/TSql/IndexOptionKind.cs @@ -38,6 +38,9 @@ public enum IndexOptionKind WaitAtLowPriority = 21, OptimizeForSequentialKey = 22, XmlCompression = 23, + VectorMetric = 24, + VectorType = 25, + OptimizeForArraySearch = 26, } #pragma warning restore 1591 diff --git a/SqlScriptDom/Parser/TSql/OptionsHelper.cs b/SqlScriptDom/Parser/TSql/OptionsHelper.cs index 642b9f4..7262ab6 100644 --- a/SqlScriptDom/Parser/TSql/OptionsHelper.cs +++ b/SqlScriptDom/Parser/TSql/OptionsHelper.cs @@ -128,12 +128,14 @@ internal SqlVersionFlags MapSqlVersionToSqlVersionFlags(SqlVersion sqlVersion) return SqlVersionFlags.TSql160; case SqlVersion.Sql170: return SqlVersionFlags.TSql170; + case SqlVersion.SqlFabricDW: + return SqlVersionFlags.TSqlFabricDW; default: throw new 
ArgumentException(String.Format(CultureInfo.CurrentCulture, SqlScriptGeneratorResource.UnknownEnumValue, sqlVersion, "SqlVersion"), "sqlVersion"); } } - internal OptionType ParseOption(antlr.IToken token, SqlVersionFlags version) + internal virtual OptionType ParseOption(antlr.IToken token, SqlVersionFlags version) { OptionInfo optionInfo; if (_stringToOptionInfo.TryGetValue(token.getText(), out optionInfo) && diff --git a/SqlScriptDom/Parser/TSql/QueryStoreOptionKind.cs b/SqlScriptDom/Parser/TSql/QueryStoreOptionKind.cs index 39d2ed3..6271914 100644 --- a/SqlScriptDom/Parser/TSql/QueryStoreOptionKind.cs +++ b/SqlScriptDom/Parser/TSql/QueryStoreOptionKind.cs @@ -21,7 +21,8 @@ public enum QueryStoreOptionKind Interval_Length_Minutes, Current_Storage_Size_MB, Max_Plans_Per_Query, - Stale_Query_Threshold_Days + Stale_Query_Threshold_Days, + Wait_Stats_Capture_Mode } diff --git a/SqlScriptDom/Parser/TSql/QueryStoreOptionsHelper.cs b/SqlScriptDom/Parser/TSql/QueryStoreOptionsHelper.cs index 2aca14e..17b194e 100644 --- a/SqlScriptDom/Parser/TSql/QueryStoreOptionsHelper.cs +++ b/SqlScriptDom/Parser/TSql/QueryStoreOptionsHelper.cs @@ -26,6 +26,7 @@ private QueryStoreOptionsHelper() AddOptionMapping(QueryStoreOptionKind.Current_Storage_Size_MB, CodeGenerationSupporter.MaxQdsSize); AddOptionMapping(QueryStoreOptionKind.Max_Plans_Per_Query, CodeGenerationSupporter.MaxPlansPerQuery); AddOptionMapping(QueryStoreOptionKind.Stale_Query_Threshold_Days, CodeGenerationSupporter.CleanupPolicy); + AddOptionMapping(QueryStoreOptionKind.Wait_Stats_Capture_Mode, CodeGenerationSupporter.WaitStatsCaptureMode, SqlVersionFlags.TSql140AndAbove); } internal static readonly QueryStoreOptionsHelper Instance = new QueryStoreOptionsHelper(); diff --git a/SqlScriptDom/Parser/TSql/SecurityObjectKind.cs b/SqlScriptDom/Parser/TSql/SecurityObjectKind.cs index 8d72de8..4d1cf17 100644 --- a/SqlScriptDom/Parser/TSql/SecurityObjectKind.cs +++ b/SqlScriptDom/Parser/TSql/SecurityObjectKind.cs @@ -42,6 +42,7 
@@ public enum SecurityObjectKind SearchPropertyList = 23, ServerRole = 24, AvailabilityGroup = 25, + ExternalModel = 26, } #pragma warning restore 1591 diff --git a/SqlScriptDom/Parser/TSql/SqlVersionFlags.cs b/SqlScriptDom/Parser/TSql/SqlVersionFlags.cs index 6be3e78..23bb379 100644 --- a/SqlScriptDom/Parser/TSql/SqlVersionFlags.cs +++ b/SqlScriptDom/Parser/TSql/SqlVersionFlags.cs @@ -24,22 +24,25 @@ internal enum SqlVersionFlags TSql150 = 0x80, TSql160 = 0x100, TSql170 = 0x200, + TSqlFabricDW = 0x400, - TSqlAll = TSql80 | TSql90 | TSql100 | TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSql170, - TSql90AndAbove = TSql90 | TSql100 | TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSql170, - TSql100AndAbove = TSql100 | TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSql170, - TSql110AndAbove = TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSql170, - TSql120AndAbove = TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSql170, - TSql130AndAbove = TSql130 | TSql140 | TSql150 | TSql160 | TSql170, - TSql140AndAbove = TSql140 | TSql150 | TSql160 | TSql170, - TSql150AndAbove = TSql150 | TSql160 | TSql170, - TSql160AndAbove = TSql160 | TSql170, + TSqlAll = TSql80 | TSql90 | TSql100 | TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSqlFabricDW | TSql170, + TSql90AndAbove = TSql90 | TSql100 | TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSqlFabricDW | TSql170, + TSql100AndAbove = TSql100 | TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSqlFabricDW | TSql170, + TSql110AndAbove = TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSqlFabricDW | TSql170, + TSql120AndAbove = TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSqlFabricDW | TSql170, + TSql130AndAbove = TSql130 | TSql140 | TSql150 | TSql160 | TSqlFabricDW | TSql170, + TSql140AndAbove = TSql140 | TSql150 | TSql160 | TSqlFabricDW | TSql170, + TSql150AndAbove = TSql150 | TSql160 | TSqlFabricDW | TSql170, + 
TSql160AndAbove = TSql160 | TSqlFabricDW | TSql170, + TSql170AndAbove = TSql170, + TSqlFabricDWAndAbove = TSql160 | TSqlFabricDW | TSql170, TSqlUnder110 = TSql80 | TSql90 | TSql100, TSqlUnder120 = TSql80 | TSql90 | TSql100 | TSql110, TSqlUnder130 = TSql80 | TSql90 | TSql100 | TSql110 | TSql120, TSqlUnder140 = TSql80 | TSql90 | TSql100 | TSql110 | TSql120 | TSql130, TSqlUnder150 = TSql80 | TSql90 | TSql100 | TSql110 | TSql120 | TSql130 | TSql140, TSqlUnder160 = TSql80 | TSql90 | TSql100 | TSql110 | TSql120 | TSql130 | TSql140 | TSql150, - TSqlUnder170 = TSql80 | TSql90 | TSql100 | TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160, + TSqlUnder170 = TSql80 | TSql90 | TSql100 | TSql110 | TSql120 | TSql130 | TSql140 | TSql150 | TSql160 | TSqlFabricDW, } } \ No newline at end of file diff --git a/SqlScriptDom/Parser/TSql/TSql120ParserBaseInternal.cs b/SqlScriptDom/Parser/TSql/TSql120ParserBaseInternal.cs index 6016f7d..ac8c7d2 100644 --- a/SqlScriptDom/Parser/TSql/TSql120ParserBaseInternal.cs +++ b/SqlScriptDom/Parser/TSql/TSql120ParserBaseInternal.cs @@ -63,7 +63,11 @@ protected static void CheckLowPriorityLockWaitValue(IntegerLiteral maxDuration, protected static void VerifyAllowedIndexOption120(IndexAffectingStatement statement, IndexOption option) { VerifyAllowedIndexOption(statement, option, SqlVersionFlags.TSql120); + VerifyAllowedOnlineIndexOptionLowPriorityLockWait(statement, option, SqlVersionFlags.TSql120); + } + protected static void VerifyAllowedOnlineIndexOptionLowPriorityLockWait(IndexAffectingStatement statement, IndexOption option, SqlVersionFlags versionFlags) + { // for a low priority lock wait (MLP) option, check if it is allowed for the statement. 
// if (option is OnlineIndexOption) @@ -71,6 +75,7 @@ protected static void VerifyAllowedIndexOption120(IndexAffectingStatement statem OnlineIndexOption onlineIndexOption = option as OnlineIndexOption; if (onlineIndexOption.LowPriorityLockWaitOption != null) { + // This syntax for CREATE INDEX currently applies to SQL Server 2022 (16.x), Azure SQL Database, and Azure SQL Managed Instance only. For ALTER INDEX, this syntax applies to SQL Server (Starting with SQL Server 2014 (12.x)) and Azure SQL Database. switch (statement) { case IndexAffectingStatement.AlterIndexRebuildOnePartition: @@ -81,6 +86,19 @@ protected static void VerifyAllowedIndexOption120(IndexAffectingStatement statem // break; + case IndexAffectingStatement.CreateIndex: + // allowed in Sql160 and higher only + // + if (versionFlags > SqlVersionFlags.TSql150) + { + break; + } + else + { + ThrowWrongIndexOptionError(statement, onlineIndexOption.LowPriorityLockWaitOption); + break; + } + default: // WAIT_AT_LOW_PRIORITY is not a valid index option in the statement // diff --git a/SqlScriptDom/Parser/TSql/TSql130.g b/SqlScriptDom/Parser/TSql/TSql130.g index a299def..a51aeb9 100644 --- a/SqlScriptDom/Parser/TSql/TSql130.g +++ b/SqlScriptDom/Parser/TSql/TSql130.g @@ -7289,16 +7289,14 @@ eventDeclarationComparisonPredicate [BooleanComparisonExpression vParent, EventS BooleanComparisonType vType = BooleanComparisonType.Equals; ScalarExpression eventValue; } - : vType = comparisonOperator eventValue = eventDeclarationValue - { - vSourceDeclaration.Value = vSource; - vParent.FirstExpression = vSourceDeclaration; - vParent.ComparisonType = vType; - vParent.SecondExpression = eventValue; - } - ; - -dropEventDeclarationList [AlterEventSessionStatement vParent] + : (vType = comparisonOperator | {LA(2) == Like}? 
tNot:Not tLike:Like { vType = BooleanComparisonType.NotLike; }) eventValue = eventDeclarationValue + { + vSourceDeclaration.Value = vSource; + vParent.FirstExpression = vSourceDeclaration; + vParent.ComparisonType = vType; + vParent.SecondExpression = eventValue; + } + ;dropEventDeclarationList [AlterEventSessionStatement vParent] { EventSessionObjectName vDropEventDeclaration; } diff --git a/SqlScriptDom/Parser/TSql/TSql140.g b/SqlScriptDom/Parser/TSql/TSql140.g index 08042c5..3139191 100644 --- a/SqlScriptDom/Parser/TSql/TSql140.g +++ b/SqlScriptDom/Parser/TSql/TSql140.g @@ -3144,6 +3144,9 @@ queryStoreOneOption returns [QueryStoreOption vResult = null] | {NextTokenMatches(CodeGenerationSupporter.CleanupPolicy)}? vResult = queryStoreTimeCleanupPolicy + | + {NextTokenMatches(CodeGenerationSupporter.WaitStatsCaptureMode)}? + vResult = queryStoreWaitStatsCaptureOption ; queryStoreDesiredStateOption returns [QueryStoreDesiredStateOption vResult = FragmentFactory.CreateFragment()] @@ -3326,6 +3329,30 @@ queryStoreTimeCleanupPolicy returns [QueryStoreTimeCleanupPolicyOption vResult = } ; +queryStoreWaitStatsCaptureOption returns [QueryStoreWaitStatsCaptureOption vResult = FragmentFactory.CreateFragment()] + : tWaitStatsCaptureMode:Identifier + { + Match(tWaitStatsCaptureMode, CodeGenerationSupporter.WaitStatsCaptureMode); + vResult.OptionKind = QueryStoreOptionKind.Wait_Stats_Capture_Mode; + UpdateTokenInfo(vResult, tWaitStatsCaptureMode); + } + ( + (EqualsSign tOff:Off + { + vResult.OptionState = OptionState.Off; + UpdateTokenInfo(vResult, tOff); + } + ) + | + (EqualsSign tOn:On + { + vResult.OptionState = OptionState.On; + UpdateTokenInfo(vResult, tOn); + } + ) + ) + ; + automaticTuningDbOption returns [AutomaticTuningDatabaseOption vResult = FragmentFactory.CreateFragment()] : tAutomaticTuning:Identifier { @@ -7648,16 +7675,14 @@ eventDeclarationComparisonPredicate [BooleanComparisonExpression vParent, EventS BooleanComparisonType vType = 
BooleanComparisonType.Equals; ScalarExpression eventValue; } - : vType = comparisonOperator eventValue = eventDeclarationValue - { - vSourceDeclaration.Value = vSource; - vParent.FirstExpression = vSourceDeclaration; - vParent.ComparisonType = vType; - vParent.SecondExpression = eventValue; - } - ; - -dropEventDeclarationList [AlterEventSessionStatement vParent] + : (vType = comparisonOperator | {LA(2) == Like}? tNot:Not tLike:Like { vType = BooleanComparisonType.NotLike; }) eventValue = eventDeclarationValue + { + vSourceDeclaration.Value = vSource; + vParent.FirstExpression = vSourceDeclaration; + vParent.ComparisonType = vType; + vParent.SecondExpression = eventValue; + } + ;dropEventDeclarationList [AlterEventSessionStatement vParent] { EventSessionObjectName vDropEventDeclaration; } diff --git a/SqlScriptDom/Parser/TSql/TSql150.g b/SqlScriptDom/Parser/TSql/TSql150.g index d04b752..35a14f1 100644 --- a/SqlScriptDom/Parser/TSql/TSql150.g +++ b/SqlScriptDom/Parser/TSql/TSql150.g @@ -3667,6 +3667,9 @@ queryStoreOneOption returns [QueryStoreOption vResult = null] | {NextTokenMatches(CodeGenerationSupporter.CleanupPolicy)}? vResult = queryStoreTimeCleanupPolicy + | + {NextTokenMatches(CodeGenerationSupporter.WaitStatsCaptureMode)}? 
+ vResult = queryStoreWaitStatsCaptureOption ; queryStoreDesiredStateOption returns [QueryStoreDesiredStateOption vResult = FragmentFactory.CreateFragment()] @@ -3894,6 +3897,30 @@ automaticTuningDbOption returns [AutomaticTuningDatabaseOption vResult = Fragmen ) ; +queryStoreWaitStatsCaptureOption returns [QueryStoreWaitStatsCaptureOption vResult = FragmentFactory.CreateFragment()] + : tWaitStatsCaptureMode:Identifier + { + Match(tWaitStatsCaptureMode, CodeGenerationSupporter.WaitStatsCaptureMode); + vResult.OptionKind = QueryStoreOptionKind.Wait_Stats_Capture_Mode; + UpdateTokenInfo(vResult, tWaitStatsCaptureMode); + } + ( + (EqualsSign tOff:Off + { + vResult.OptionState = OptionState.Off; + UpdateTokenInfo(vResult, tOff); + } + ) + | + (EqualsSign tOn:On + { + vResult.OptionState = OptionState.On; + UpdateTokenInfo(vResult, tOn); + } + ) + ) + ; + automaticTuningOptions [AutomaticTuningDatabaseOption vParent] { AutomaticTuningOption vAutomaticTuningOption; @@ -8200,16 +8227,14 @@ eventDeclarationComparisonPredicate [BooleanComparisonExpression vParent, EventS BooleanComparisonType vType = BooleanComparisonType.Equals; ScalarExpression eventValue; } - : vType = comparisonOperator eventValue = eventDeclarationValue - { - vSourceDeclaration.Value = vSource; - vParent.FirstExpression = vSourceDeclaration; - vParent.ComparisonType = vType; - vParent.SecondExpression = eventValue; - } - ; - -dropEventDeclarationList [AlterEventSessionStatement vParent] + : (vType = comparisonOperator | {LA(2) == Like}? 
tNot:Not tLike:Like { vType = BooleanComparisonType.NotLike; }) eventValue = eventDeclarationValue + { + vSourceDeclaration.Value = vSource; + vParent.FirstExpression = vSourceDeclaration; + vParent.ComparisonType = vType; + vParent.SecondExpression = eventValue; + } + ;dropEventDeclarationList [AlterEventSessionStatement vParent] { EventSessionObjectName vDropEventDeclaration; } diff --git a/SqlScriptDom/Parser/TSql/TSql150ParserBaseInternal.cs b/SqlScriptDom/Parser/TSql/TSql150ParserBaseInternal.cs index 9c6b48c..66cd1a0 100644 --- a/SqlScriptDom/Parser/TSql/TSql150ParserBaseInternal.cs +++ b/SqlScriptDom/Parser/TSql/TSql150ParserBaseInternal.cs @@ -49,36 +49,9 @@ public TSql150ParserBaseInternal(bool initialQuotedIdentifiersOn) protected static void VerifyAllowedIndexOption150(IndexAffectingStatement statement, IndexOption option) { VerifyAllowedIndexOption(statement, option, SqlVersionFlags.TSql150); - VerifyAllowedOnlineIndexOptionLowPriorityLockWait(statement, option); + VerifyAllowedOnlineIndexOptionLowPriorityLockWait(statement, option, SqlVersionFlags.TSql150); } - protected static void VerifyAllowedOnlineIndexOptionLowPriorityLockWait(IndexAffectingStatement statement, IndexOption option) - { - // for a low priority lock wait (MLP) option, check if it is allowed for the statement. 
- // - if (option is OnlineIndexOption) - { - OnlineIndexOption onlineIndexOption = option as OnlineIndexOption; - if (onlineIndexOption.LowPriorityLockWaitOption != null) - { - switch (statement) - { - case IndexAffectingStatement.AlterIndexRebuildOnePartition: - case IndexAffectingStatement.AlterTableRebuildOnePartition: - case IndexAffectingStatement.AlterIndexRebuildAllPartitions: - case IndexAffectingStatement.AlterTableRebuildAllPartitions: - // allowed - // - break; - - default: - // WAIT_AT_LOW_PRIORITY is not a valid index option in the statement - // - ThrowWrongIndexOptionError(statement, onlineIndexOption.LowPriorityLockWaitOption); - break; - } - } - } - } + } } diff --git a/SqlScriptDom/Parser/TSql/TSql160.g b/SqlScriptDom/Parser/TSql/TSql160.g index ea2694c..4d794ca 100644 --- a/SqlScriptDom/Parser/TSql/TSql160.g +++ b/SqlScriptDom/Parser/TSql/TSql160.g @@ -3687,6 +3687,9 @@ queryStoreOneOption returns [QueryStoreOption vResult = null] | {NextTokenMatches(CodeGenerationSupporter.CleanupPolicy)}? vResult = queryStoreTimeCleanupPolicy + | + {NextTokenMatches(CodeGenerationSupporter.WaitStatsCaptureMode)}? 
+ vResult = queryStoreWaitStatsCaptureOption ; queryStoreDesiredStateOption returns [QueryStoreDesiredStateOption vResult = FragmentFactory.CreateFragment()] @@ -3869,6 +3872,30 @@ queryStoreTimeCleanupPolicy returns [QueryStoreTimeCleanupPolicyOption vResult = } ; +queryStoreWaitStatsCaptureOption returns [QueryStoreWaitStatsCaptureOption vResult = FragmentFactory.CreateFragment()] + : tWaitStatsCaptureMode:Identifier + { + Match(tWaitStatsCaptureMode, CodeGenerationSupporter.WaitStatsCaptureMode); + vResult.OptionKind = QueryStoreOptionKind.Wait_Stats_Capture_Mode; + UpdateTokenInfo(vResult, tWaitStatsCaptureMode); + } + ( + (EqualsSign tOff:Off + { + vResult.OptionState = OptionState.Off; + UpdateTokenInfo(vResult, tOff); + } + ) + | + (EqualsSign tOn:On + { + vResult.OptionState = OptionState.On; + UpdateTokenInfo(vResult, tOn); + } + ) + ) + ; + automaticTuningDbOption returns [AutomaticTuningDatabaseOption vResult = FragmentFactory.CreateFragment()] : tAutomaticTuning:Identifier { @@ -8225,16 +8252,14 @@ eventDeclarationComparisonPredicate [BooleanComparisonExpression vParent, EventS BooleanComparisonType vType = BooleanComparisonType.Equals; ScalarExpression eventValue; } - : vType = comparisonOperator eventValue = eventDeclarationValue - { - vSourceDeclaration.Value = vSource; - vParent.FirstExpression = vSourceDeclaration; - vParent.ComparisonType = vType; - vParent.SecondExpression = eventValue; - } - ; - -dropEventDeclarationList [AlterEventSessionStatement vParent] + : (vType = comparisonOperator | {LA(2) == Like}? 
tNot:Not tLike:Like { vType = BooleanComparisonType.NotLike; }) eventValue = eventDeclarationValue + { + vSourceDeclaration.Value = vSource; + vParent.FirstExpression = vSourceDeclaration; + vParent.ComparisonType = vType; + vParent.SecondExpression = eventValue; + } + ;dropEventDeclarationList [AlterEventSessionStatement vParent] { EventSessionObjectName vDropEventDeclaration; } @@ -31451,6 +31476,12 @@ expressionPrimary [ExpressionFlags expressionFlags] returns [PrimaryExpression v | {NextTokenMatches(CodeGenerationSupporter.IIf) && (LA(2) == LeftParenthesis)}? vResult=iIfCall + | + {NextTokenMatches(CodeGenerationSupporter.JsonObject) && (LA(2) == LeftParenthesis)}? + vResult=jsonObjectCall + | + {NextTokenMatches(CodeGenerationSupporter.JsonArray) && (LA(2) == LeftParenthesis)}? + vResult=jsonArrayCall | (Identifier LeftParenthesis)=> vResult=builtInFunctionCall @@ -31819,15 +31850,31 @@ jsonKeyValueExpression returns [JsonKeyValue vResult = FragmentFactory.CreateFra : ( vKey=expression - { - vResult.JsonKeyName=vKey; - } + { + vResult.JsonKeyName=vKey; + } Colon vValue=expression - { - vResult.JsonValue=vValue; + { + vResult.JsonValue=vValue; } - ) - ; + + | + + label:Label + { + var identifier = this.FragmentFactory.CreateFragment(); + var multiPartIdentifier = this.FragmentFactory.CreateFragment(); + var columnRef = this.FragmentFactory.CreateFragment(); + CreateIdentifierFromLabel(label, identifier, multiPartIdentifier); + columnRef.MultiPartIdentifier = multiPartIdentifier; + vResult.JsonKeyName=columnRef; + } + vValue=expression + { + vResult.JsonValue=vValue; + } + ) + ; windowClause returns [WindowClause vResult = FragmentFactory.CreateFragment()] { @@ -32455,6 +32502,32 @@ iIfCall returns [IIfCall vResult = this.FragmentFactory.CreateFragment( } ; +jsonObjectCall returns [FunctionCall vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; +} + : vIdentifier=nonQuotedIdentifier + { + Match(vIdentifier, 
CodeGenerationSupporter.JsonObject); + vResult.FunctionName = vIdentifier; + } + LeftParenthesis + jsonObjectBuiltInFunctionCall[vResult] + ; + +jsonArrayCall returns [FunctionCall vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; +} + : vIdentifier=nonQuotedIdentifier + { + Match(vIdentifier, CodeGenerationSupporter.JsonArray); + vResult.FunctionName = vIdentifier; + } + LeftParenthesis + jsonArrayBuiltInFunctionCall[vResult] + ; + coalesceExpression [ExpressionFlags expressionFlags] returns [CoalesceExpression vResult = this.FragmentFactory.CreateFragment()] { ScalarExpression vExpression; diff --git a/SqlScriptDom/Parser/TSql/TSql160ParserBaseInternal.cs b/SqlScriptDom/Parser/TSql/TSql160ParserBaseInternal.cs index 4dd2554..8d49eb1 100644 --- a/SqlScriptDom/Parser/TSql/TSql160ParserBaseInternal.cs +++ b/SqlScriptDom/Parser/TSql/TSql160ParserBaseInternal.cs @@ -53,7 +53,7 @@ public TSql160ParserBaseInternal(bool initialQuotedIdentifiersOn) protected static void VerifyAllowedIndexOption160(IndexAffectingStatement statement, IndexOption option) { VerifyAllowedIndexOption(statement, option, SqlVersionFlags.TSql160); - VerifyAllowedOnlineIndexOptionLowPriorityLockWait(statement, option); + VerifyAllowedOnlineIndexOptionLowPriorityLockWait(statement, option, SqlVersionFlags.TSql160); } protected static SqlDataTypeOption ParseDataType160(string token) diff --git a/SqlScriptDom/Parser/TSql/TSql170.g b/SqlScriptDom/Parser/TSql/TSql170.g index b6ba097..85ec9cf 100644 --- a/SqlScriptDom/Parser/TSql/TSql170.g +++ b/SqlScriptDom/Parser/TSql/TSql170.g @@ -883,6 +883,12 @@ create2005Statements returns [TSqlStatement vResult = null] | {NextTokenMatches(CodeGenerationSupporter.ColumnStore)}? vResult=createColumnStoreIndexStatement[null, null] + | + {NextTokenMatches(CodeGenerationSupporter.Json)}? + vResult=createJsonIndexStatement[null, null] + | + {NextTokenMatches(CodeGenerationSupporter.Vector)}? 
+ vResult=createVectorIndexStatement[null, null] | {NextTokenMatches(CodeGenerationSupporter.Contract)}? vResult=createContractStatement @@ -900,7 +906,7 @@ create2005Statements returns [TSqlStatement vResult = null] vResult=createEventStatement // NOTIFICATION or SESSION | {NextTokenMatches(CodeGenerationSupporter.External)}? - vResult=createExternalStatements // EXTERNAL DATA SOURCE, FILE FORMAT, STREAM, TABLE, RESOURCE POOL, LIBRARY, LANGUAGE + vResult=createExternalStatements // EXTERNAL DATA SOURCE, FILE FORMAT, STREAM, TABLE, RESOURCE POOL, LIBRARY, LANGUAGE, MODEL | {NextTokenMatches(CodeGenerationSupporter.Fulltext)}? vResult=createFulltextStatement // Index or CATALOG @@ -2896,6 +2902,7 @@ alterDatabase [IToken tAlter] returns [AlterDatabaseStatement vResult = null] | vResult = alterDbSet | vResult = alterDbCollate | vResult = alterDbRebuild // Undocumented - for PSS only + | vResult = alterDbPerformCutover ) { if(vUseCurrent) @@ -3059,6 +3066,14 @@ alterDbModify returns [AlterDatabaseStatement vResult = null] alterDbModifyAzureOptions returns [AlterDatabaseSetStatement vResult = FragmentFactory.CreateFragment()] : azureOptions[vResult, vResult.Options] + ( + With tManualCutover:Identifier + { + Match(tManualCutover, CodeGenerationSupporter.ManualCutover); + vResult.WithManualCutover = true; + UpdateTokenInfo(vResult, tManualCutover); + } + )? 
; // MODIFY File syntax @@ -3143,6 +3158,14 @@ toFilegroup returns [Identifier vResult] } ; +alterDbPerformCutover returns [AlterDatabasePerformCutoverStatement vResult = FragmentFactory.CreateFragment()] + : tPerformCutover:Identifier + { + Match(tPerformCutover, CodeGenerationSupporter.PerformCutover); + UpdateTokenInfo(vResult, tPerformCutover); + } + ; + xactTermination returns [AlterDatabaseTermination vResult = FragmentFactory.CreateFragment()] { Literal vInteger; @@ -3221,6 +3244,8 @@ dbOptionStateItem[ref ulong encounteredOptions] returns [DatabaseOption vResult vResult = changeTrackingDbOption | {NextTokenMatches(CodeGenerationSupporter.AcceleratedDatabaseRecovery)}? vResult = acceleratedDatabaseRecoveryOption + | {NextTokenMatches(CodeGenerationSupporter.OptimizedLocking)}? + vResult = optimizedLockingOption | {NextTokenMatches(CodeGenerationSupporter.Containment)}? vResult = dbContainmentOption | {NextTokenMatches(CodeGenerationSupporter.Hadr)}? @@ -3534,6 +3559,30 @@ acceleratedDatabaseRecoveryOption returns [AcceleratedDatabaseRecoveryDatabaseOp ) ; +optimizedLockingOption returns [OptimizedLockingDatabaseOption vResult = FragmentFactory.CreateFragment()] + : tOptimizedLocking:Identifier + { + Match(tOptimizedLocking, CodeGenerationSupporter.OptimizedLocking); + vResult.OptionKind = DatabaseOptionKind.OptimizedLocking; + UpdateTokenInfo(vResult, tOptimizedLocking); + } + ( + (EqualsSign tOff:Off + { + vResult.OptionState = OptionState.Off; + UpdateTokenInfo(vResult, tOff); + } + ) + | + (EqualsSign tOn:On + { + vResult.OptionState = OptionState.On; + UpdateTokenInfo(vResult, tOn); + } + ) + ) + ; + changeTrackingOnOptions [ChangeTrackingDatabaseOption vParent] { bool autoCleanupEncountered = false; @@ -3687,6 +3736,9 @@ queryStoreOneOption returns [QueryStoreOption vResult = null] | {NextTokenMatches(CodeGenerationSupporter.CleanupPolicy)}? vResult = queryStoreTimeCleanupPolicy + | + {NextTokenMatches(CodeGenerationSupporter.WaitStatsCaptureMode)}? 
+ vResult = queryStoreWaitStatsCaptureOption ; queryStoreDesiredStateOption returns [QueryStoreDesiredStateOption vResult = FragmentFactory.CreateFragment()] @@ -3869,6 +3921,30 @@ queryStoreTimeCleanupPolicy returns [QueryStoreTimeCleanupPolicyOption vResult = } ; +queryStoreWaitStatsCaptureOption returns [QueryStoreWaitStatsCaptureOption vResult = FragmentFactory.CreateFragment()] + : tWaitStatsCaptureMode:Identifier + { + Match(tWaitStatsCaptureMode, CodeGenerationSupporter.WaitStatsCaptureMode); + vResult.OptionKind = QueryStoreOptionKind.Wait_Stats_Capture_Mode; + UpdateTokenInfo(vResult, tWaitStatsCaptureMode); + } + ( + (EqualsSign tOff:Off + { + vResult.OptionState = OptionState.Off; + UpdateTokenInfo(vResult, tOff); + } + ) + | + (EqualsSign tOn:On + { + vResult.OptionState = OptionState.On; + UpdateTokenInfo(vResult, tOn); + } + ) + ) + ; + automaticTuningDbOption returns [AutomaticTuningDatabaseOption vResult = FragmentFactory.CreateFragment()] : tAutomaticTuning:Identifier { @@ -4246,8 +4322,8 @@ alterDatabaseEncryptionKey [IToken tAlter] returns [AlterDatabaseEncryptionKeySt addSensitivityClassificationStatement returns [AddSensitivityClassificationStatement vResult = this.FragmentFactory.CreateFragment()] { ColumnReferenceExpression vColumn; - SensitivityClassificationOption vOption; - long encounteredOptions = 0; + SensitivityClassificationOption vOption; + long encounteredOptions = 0; } : tSensitivity:Identifier tClassification:Identifier To { @@ -4256,24 +4332,24 @@ addSensitivityClassificationStatement returns [AddSensitivityClassificationState } (vColumn = column { - CheckTableNameExistsForColumn(vColumn, true); + CheckTableNameExistsForColumn(vColumn, true); AddAndUpdateTokenInfo(vResult, vResult.Columns, vColumn); } (Comma vColumn = column { - CheckTableNameExistsForColumn(vColumn, true); + CheckTableNameExistsForColumn(vColumn, true); AddAndUpdateTokenInfo(vResult, vResult.Columns, vColumn); } )* ) - With LeftParenthesis vOption = 
sensitivityClassificationOption + With LeftParenthesis vOption = sensitivityClassificationOption { - CheckOptionDuplication(ref encounteredOptions, (int)vOption.Type, vOption); + CheckOptionDuplication(ref encounteredOptions, (int)vOption.Type, vOption); AddAndUpdateTokenInfo(vResult, vResult.Options, vOption); } (Comma vOption = sensitivityClassificationOption { - CheckOptionDuplication(ref encounteredOptions, (int)vOption.Type, vOption); + CheckOptionDuplication(ref encounteredOptions, (int)vOption.Type, vOption); AddAndUpdateTokenInfo(vResult, vResult.Options, vOption); } )* @@ -4304,10 +4380,10 @@ sensitivityClassificationOption returns [SensitivityClassificationOption vResult break; } - vResult = FragmentFactory.CreateFragment(); + vResult = FragmentFactory.CreateFragment(); vResult.Value = vSensitivityValue; - vResult.Type = optionType; + vResult.Type = optionType; UpdateTokenInfo(vResult, tOption); } @@ -4356,17 +4432,17 @@ dropSensitivityClassificationStatement returns [DropSensitivityClassificationSta } (vColumn = column { - CheckTableNameExistsForColumn(vColumn, true); + CheckTableNameExistsForColumn(vColumn, true); AddAndUpdateTokenInfo(vResult, vResult.Columns, vColumn); } (Comma vColumn = column { - CheckTableNameExistsForColumn(vColumn, true); + CheckTableNameExistsForColumn(vColumn, true); AddAndUpdateTokenInfo(vResult, vResult.Columns, vColumn); } )* ) - ; + ; ////////////////////////////////////////////////////////////////////// // Create Database @@ -6213,7 +6289,7 @@ simpleBulkInsertOptionWithValue returns [LiteralBulkInsertOption vResult = Fragm | iValue = identifier { vResult.OptionKind = BulkInsertStringOptionsHelper.Instance.ParseOption(tOption, SqlVersionFlags.TSql150); - UpdateTokenInfo(vResult, tOption); + UpdateTokenInfo(vResult, tOption); if (vResult.OptionKind == BulkInsertOptionKind.HeaderRow) if(!TryMatch(iValue, CodeGenerationSupporter.True)) Match(iValue, CodeGenerationSupporter.False); @@ -8225,16 +8301,14 @@ 
eventDeclarationComparisonPredicate [BooleanComparisonExpression vParent, EventS BooleanComparisonType vType = BooleanComparisonType.Equals; ScalarExpression eventValue; } - : vType = comparisonOperator eventValue = eventDeclarationValue - { - vSourceDeclaration.Value = vSource; - vParent.FirstExpression = vSourceDeclaration; - vParent.ComparisonType = vType; - vParent.SecondExpression = eventValue; - } - ; - -dropEventDeclarationList [AlterEventSessionStatement vParent] + : (vType = comparisonOperator | {LA(2) == Like}? tNot:Not tLike:Like { vType = BooleanComparisonType.NotLike; }) eventValue = eventDeclarationValue + { + vSourceDeclaration.Value = vSource; + vParent.FirstExpression = vSourceDeclaration; + vParent.ComparisonType = vType; + vParent.SecondExpression = eventValue; + } + ;dropEventDeclarationList [AlterEventSessionStatement vParent] { EventSessionObjectName vDropEventDeclaration; } @@ -8894,6 +8968,9 @@ createExternalStatements returns [TSqlStatement vResult = null] | {NextTokenMatches(CodeGenerationSupporter.Stream)}? vResult = createExternalStreamStatement + | + {NextTokenMatches(CodeGenerationSupporter.Model)}? + vResult = createExternalModelStatement ) ; @@ -8911,6 +8988,9 @@ alterExternalStatements returns [TSqlStatement vResult = null] | {NextTokenMatches(CodeGenerationSupporter.Language)}? vResult = alterExternalLanguageStatement + | + {NextTokenMatches(CodeGenerationSupporter.Model)}? + vResult = alterExternalModelStatement ) ; @@ -13892,7 +13972,7 @@ securityTargetObjectCommon[SecurityTargetObject vParent] ( vIdentifier2=securityStatementPermission { - vParent.ObjectKind = ParseSecurityObjectKind(vIdentifier1, vIdentifier2); + vParent.ObjectKind = ParseSecurityObjectKindTSql170(vIdentifier1, vIdentifier2); } | vIdentifier2=securityStatementPermission vIdentifier3=securityStatementPermission @@ -14715,9 +14795,9 @@ dropStatements returns [TSqlStatement vResult] | {NextTokenMatches(CodeGenerationSupporter.Column)}? 
vResult = dropColumnStatements | {NextTokenMatches(CodeGenerationSupporter.External)}? - vResult = dropExternalStatement // EXTERNAL DATA SOURCE, FILE FORMAT, TABLE or RESOURCE POOL - | {NextTokenMatches(CodeGenerationSupporter.Sensitivity)}? - vResult = dropSensitivityClassificationStatement + vResult = dropExternalStatement // EXTERNAL DATA SOURCE, FILE FORMAT, TABLE , MODEL or RESOURCE POOL + | {NextTokenMatches(CodeGenerationSupporter.Sensitivity)}? + vResult = dropSensitivityClassificationStatement | vResult = dropServerStatements | vResult = dropUserStatement ) @@ -15609,6 +15689,9 @@ dropExternalStatement returns [TSqlStatement vResult = null] | {NextTokenMatches(CodeGenerationSupporter.Resource)}? vResult = dropExternalResourcePoolStatement + | + {NextTokenMatches(CodeGenerationSupporter.Model)}? + vResult = dropExternalModelStatement ) ; @@ -16797,6 +16880,8 @@ createIndexStatement returns [TSqlStatement vResult = null] ( vResult=createRelationalIndexStatement[tUnique, isClustered] | vResult=createColumnStoreIndexStatement[tUnique, isClustered] + | vResult=createJsonIndexStatement[tUnique, isClustered] + | vResult=createVectorIndexStatement[tUnique, isClustered] ) ) | @@ -16897,13 +16982,13 @@ createColumnStoreIndexStatement [IToken tUnique, bool? isClustered] returns [Cre ( identifierColumnList[vResult, vResult.OrderedColumns] { - foreach (var col in vResult.OrderedColumns) - { - if (PseudoColumnHelper.IsGraphPseudoColumn(col)) - { + foreach (var col in vResult.OrderedColumns) + { + if (PseudoColumnHelper.IsGraphPseudoColumn(col)) + { ThrowIncorrectSyntaxErrorException(col); } - } + } } ) )? @@ -16933,6 +17018,102 @@ createColumnStoreIndexStatement [IToken tUnique, bool? isClustered] returns [Cre )? ; +createJsonIndexStatement [IToken tUnique, bool? 
isClustered] returns [CreateJsonIndexStatement vResult = FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; + SchemaObjectName vSchemaObjectName; + Identifier vJsonColumn; + StringLiteral vPath; + + if (tUnique != null) + { + ThrowIncorrectSyntaxErrorException(tUnique); + } + if (isClustered.HasValue) + { + ThrowIncorrectSyntaxErrorException(LT(1)); + } +} + : tJson:Identifier tIndex:Index vIdentifier=identifier + { + Match(tJson, CodeGenerationSupporter.Json); + vResult.Name = vIdentifier; + } + tOn:On vSchemaObjectName=schemaObjectThreePartName + { + vResult.OnName = vSchemaObjectName; + } + LeftParenthesis vJsonColumn=identifier tRParen:RightParenthesis + { + vResult.JsonColumn = vJsonColumn; + UpdateTokenInfo(vResult, tRParen); + } + ( + tFor:For LeftParenthesis + vPath=stringLiteral + { + AddAndUpdateTokenInfo(vResult, vResult.ForJsonPaths, vPath); + } + ( + Comma vPath=stringLiteral + { + AddAndUpdateTokenInfo(vResult, vResult.ForJsonPaths, vPath); + } + )* + RightParenthesis + )? + ( + // Greedy due to conflict with withCommonTableExpressionsAndXmlNamespaces + options {greedy = true; } : + With + indexOptionList[IndexAffectingStatement.CreateIndex, vResult.IndexOptions, vResult] + )? + ; + +createVectorIndexStatement [IToken tUnique, bool? 
isClustered] returns [CreateVectorIndexStatement vResult = FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; + SchemaObjectName vSchemaObjectName; + Identifier vVectorColumn; + FileGroupOrPartitionScheme vFileGroupOrPartitionScheme; + + if (tUnique != null) + { + ThrowIncorrectSyntaxErrorException(tUnique); + } + if (isClustered.HasValue) + { + ThrowIncorrectSyntaxErrorException(LT(1)); + } +} + : tVector:Identifier tIndex:Index vIdentifier=identifier + { + Match(tVector, CodeGenerationSupporter.Vector); + vResult.Name = vIdentifier; + } + tOn:On vSchemaObjectName=schemaObjectThreePartName + { + vResult.OnName = vSchemaObjectName; + } + LeftParenthesis vVectorColumn=identifier tRParen:RightParenthesis + { + vResult.VectorColumn = vVectorColumn; + UpdateTokenInfo(vResult, tRParen); + } + ( + // Greedy due to conflict with withCommonTableExpressionsAndXmlNamespaces + options {greedy = true; } : + With + indexOptionList[IndexAffectingStatement.CreateIndex, vResult.IndexOptions, vResult] + )? + ( + On vFileGroupOrPartitionScheme=filegroupOrPartitionScheme + { + vResult.OnFileGroupOrPartitionScheme = vFileGroupOrPartitionScheme; + } + )? 
+ ; + indexKeyColumnList[CreateIndexStatement vParent] { ColumnWithSortOrder vColumnWithSortOrder; @@ -17033,9 +17214,9 @@ filterExpressionPrimary returns [BooleanExpression vResult] vExpression = filterColumn ( Is - ( - vResult = filterNullPredicate[vExpression] - | + ( + vResult = filterNullPredicate[vExpression] + | ( Not { @@ -17048,7 +17229,7 @@ filterExpressionPrimary returns [BooleanExpression vResult] | vResult = filterDistinctPredicate[vExpression, vNotDefined] ) - ) + ) | vResult = filterComparisonPredicate[vExpression] | vResult = filterInPredicate[vExpression] ) @@ -17101,9 +17282,9 @@ filterDistinctPredicate[ScalarExpression vColumn, bool vNotDefined] returns [Dis ScalarExpression vExpression; } : - vExpression = expression + vExpression = expression { - vResult.FirstExpression = vColumn; + vResult.FirstExpression = vColumn; vResult.SecondExpression = vExpression; vResult.IsNot = vNotDefined; } @@ -17111,9 +17292,9 @@ filterDistinctPredicate[ScalarExpression vColumn, bool vNotDefined] returns [Dis filterNullPredicateFromDistinctPredicate[ScalarExpression vColumn, bool vNotDefined] returns [BooleanIsNullExpression vResult = this.FragmentFactory.CreateFragment()] : - tNull:Null + tNull:Null { - vResult.IsNot = vNotDefined; + vResult.IsNot = vNotDefined; vResult.Expression = vColumn; UpdateTokenInfo(vResult,tNull); } @@ -17577,6 +17758,12 @@ indexOption returns [IndexOption vResult = null] | {NextTokenMatches(CodeGenerationSupporter.WaitAtLowPriority)}? vResult=waitAtLowPriorityOption + | + {NextTokenMatches(CodeGenerationSupporter.Metric)}? + vResult=vectorMetricOption + | + {NextTokenMatches(CodeGenerationSupporter.Type)}? 
+ vResult=vectorTypeOption | vResult=indexStateOption ; @@ -18955,9 +19142,21 @@ joinElement[SubDmlFlags subDmlFlags, ref TableReference vResult] ; selectTableReferenceElement [SubDmlFlags subDmlFlags] returns [TableReference vResult = null] +{ + IToken tAfterJoinParenthesis = null; +} : + // Apply SaveGuessing optimization ONLY when VECTOR keyword is detected in lookahead + // This fixes VECTOR parsing in deeply nested JOINs without breaking other valid SQL patterns + {ContainsVectorInLookahead()}? + ({ if (!SkipGuessing(tAfterJoinParenthesis)) }: + (joinParenthesis[subDmlFlags])=> ({ SaveGuessing(out tAfterJoinParenthesis); }:))=> + ({ if (!SkipGuessing(tAfterJoinParenthesis)) }: + vResult=joinParenthesis[subDmlFlags]) + | + // Standard syntactic predicate for all other cases (joinParenthesis[subDmlFlags])=> - vResult=joinParenthesis[subDmlFlags] + vResult=joinParenthesis[subDmlFlags] | vResult=selectTableReferenceElementWithoutJoinParenthesis[subDmlFlags] ; @@ -18965,8 +19164,10 @@ selectTableReferenceElementWithoutJoinParenthesis[SubDmlFlags subDmlFlags] retur : {NextTokenMatches(CodeGenerationSupporter.ChangeTable)}? vResult=changeTableTableReference + | {NextTokenMatches(CodeGenerationSupporter.AiGenerateChunks)}? + vResult = aiGenerateChunksTableReference | vResult=builtInFunctionTableReference - | {NextIdentifierMatchesOneOf(new string[] {CodeGenerationSupporter.StringSplit, CodeGenerationSupporter.GenerateSeries})}? + | {NextIdentifierMatchesOneOf(new string[] {CodeGenerationSupporter.StringSplit, CodeGenerationSupporter.GenerateSeries, CodeGenerationSupporter.RegexpMatches, CodeGenerationSupporter.RegexpSplitToTable})}? vResult=globalFunctionTableReference | vResult=variableTableReference | vResult=variableMethodCallTableReference @@ -18980,124 +19181,276 @@ selectTableReferenceElementWithoutJoinParenthesis[SubDmlFlags subDmlFlags] retur | vResult=subDmlTableReference[subDmlFlags] | {NextTokenMatches(CodeGenerationSupporter.Predict)}? 
vResult=predictTableReference[subDmlFlags] + | {NextTokenMatches(CodeGenerationSupporter.VectorSearch)}? + vResult=vectorSearchTableReference | vResult=schemaObjectOrFunctionTableReference ; +aiGenerateChunksTableReference returns [AIGenerateChunksTableReference vResult = null] +{ + ScalarExpression vSource; + Identifier vChunkType; +} + : + {NextTokenMatches(CodeGenerationSupporter.AiGenerateChunks)}? + tFunc:Identifier + { + Match(tFunc, CodeGenerationSupporter.AiGenerateChunks); + } + LeftParenthesis + tSourceToken:Identifier + { + Match(tSourceToken, CodeGenerationSupporter.Source); + } + EqualsSign + vSource = expression + Comma + tChunkTypeToken:Identifier + { + Match(tChunkTypeToken, CodeGenerationSupporter.ChunkType); + } + EqualsSign + vChunkType = identifier + Comma + vResult = aiGenerateFixedChunksTableReference[vSource, vChunkType] + tRParen:RightParenthesis + { + if (vResult != null) + { + UpdateTokenInfo(vResult, tFunc); + UpdateTokenInfo(vResult, tRParen); + } + } + simpleTableReferenceAliasOpt[vResult] + ; + +aiGenerateFixedChunksTableReference [ScalarExpression vSource, Identifier vChunkType] + returns [AIGenerateFixedChunksTableReference vResult = FragmentFactory.CreateFragment()] +{ + Identifier vChunkSizeParam; + Identifier vOverlapParam = null; + Identifier vEnableChunkSetIdParam = null; + + ScalarExpression vChunkSize = null; + ScalarExpression vOverlap = null; + ScalarExpression vEnableChunkSetId = null; +} + : + { + Match(vChunkType, CodeGenerationSupporter.Fixed); + vResult.Source = vSource; + vResult.ChunkType = vChunkType; + } + vChunkSizeParam = identifier + { + Match(vChunkSizeParam, CodeGenerationSupporter.ChunkSize); + } + EqualsSign + vChunkSize = expression + { + vResult.ChunkSize = vChunkSize; + } + + ( + Comma + vOverlapParam = identifier + { + Match(vOverlapParam, CodeGenerationSupporter.Overlap); + } + EqualsSign + vOverlap = expression + { + vResult.Overlap = vOverlap; + } + )? 
+ + ( + Comma + vEnableChunkSetIdParam = identifier + { + Match(vEnableChunkSetIdParam, CodeGenerationSupporter.EnableChunkSetId); + } + EqualsSign + ( + vEnableChunkSetId = integer // constant integer + | vEnableChunkSetId = nullLiteral // NULL literal + ) + { + vResult.EnableChunkSetId = vEnableChunkSetId; + } + )? + ; + +vectorSearchTableReference returns [VectorSearchTableReference vResult = FragmentFactory.CreateFragment()] +{ + TableReferenceWithAlias vTable; + ColumnReferenceExpression vColumn; + ScalarExpression vSimilarTo; + StringLiteral vMetric; + ScalarExpression vTopN; +} + : + tVectorSearch:Identifier LeftParenthesis + { + Match(tVectorSearch, CodeGenerationSupporter.VectorSearch); + UpdateTokenInfo(vResult, tVectorSearch); + } + Table EqualsSign vTable = mergeTarget[false] + { + vResult.Table = vTable; + } + Comma Column EqualsSign vColumn = fixedColumn + { + vResult.Column = vColumn; + } + Comma tSimilarTo:Identifier EqualsSign vSimilarTo = expression + { + Match(tSimilarTo, CodeGenerationSupporter.SimilarTo); + + // Validate that SIMILAR_TO does not contain a subquery + if (vSimilarTo is ScalarSubquery) + { + ThrowParseErrorException("SQL46098", vSimilarTo, TSqlParserResource.SQL46098Message); + } + + vResult.SimilarTo = vSimilarTo; + } + Comma tMetric:Identifier EqualsSign vMetric = stringLiteral + { + Match(tMetric, CodeGenerationSupporter.Metric); + MatchString(vMetric, CodeGenerationSupporter.Cosine, CodeGenerationSupporter.Dot, CodeGenerationSupporter.Euclidean); + vResult.Metric = vMetric; + } + Comma tTopN:Identifier EqualsSign vTopN = signedIntegerOrVariableOrColumnReference + { + Match(tTopN, CodeGenerationSupporter.TopN); + + // Validate that TOP_N is not a negative number + if (vTopN is UnaryExpression unaryExpr && unaryExpr.UnaryExpressionType == UnaryExpressionType.Negative) + { + ThrowParseErrorException("SQL46010", unaryExpr, TSqlParserResource.SQL46010Message, "-"); + } + + vResult.TopN = vTopN; + } + RightParenthesis 
simpleTableReferenceAliasOpt[vResult] + ; + predictTableReference[SubDmlFlags subDmlFlags] returns [PredictTableReference vResult] - : - {NextTokenMatches(CodeGenerationSupporter.Predict)}? - tPredict:Identifier LeftParenthesis vResult = predictParams[subDmlFlags, ExpressionFlags.None] tRParen:RightParenthesis predictWithClauseOpt[vResult] simpleTableReferenceAliasOpt[vResult] - { - Match(tPredict, CodeGenerationSupporter.Predict); - UpdateTokenInfo(vResult, tPredict); - UpdateTokenInfo(vResult, tRParen); - } - ; + : + {NextTokenMatches(CodeGenerationSupporter.Predict)}? + tPredict:Identifier LeftParenthesis vResult = predictParams[subDmlFlags, ExpressionFlags.None] tRParen:RightParenthesis predictWithClauseOpt[vResult] simpleTableReferenceAliasOpt[vResult] + { + Match(tPredict, CodeGenerationSupporter.Predict); + UpdateTokenInfo(vResult, tPredict); + UpdateTokenInfo(vResult, tRParen); + } + ; predictParams[SubDmlFlags subDmlFlags, ExpressionFlags expressionFlags] returns [PredictTableReference vResult = FragmentFactory.CreateFragment()] { - ScalarExpression vModelVariable; - ScalarSubquery vModelSubquery; - TableReferenceWithAlias vDataSource; - Identifier vRuntime; -} - : - ( - tModelVariable:Identifier EqualsSign vModelVariable = expression - { - Match(tModelVariable, CodeGenerationSupporter.Model); - vResult.ModelVariable = vModelVariable; - UpdateTokenInfo(vResult, tModelVariable); - } - | tModelSubquery:Identifier EqualsSign vModelSubquery = subquery[SubDmlFlags.SelectNotForInsert, expressionFlags] - { - vResult.ModelSubquery = vModelSubquery; - } - ) - Comma tData:Identifier EqualsSign vDataSource = mergeTarget[false] - { - vResult.DataSource = vDataSource; - } - ( - Comma tRunTime:Identifier EqualsSign vRuntime = identifier - { - vResult.RunTime = vRuntime; - } - )? 
- ; + ScalarExpression vModelVariable; + ScalarSubquery vModelSubquery; + TableReferenceWithAlias vDataSource; + Identifier vRuntime; +} + : + ( + tModelVariable:Identifier EqualsSign vModelVariable = expression + { + Match(tModelVariable, CodeGenerationSupporter.Model); + vResult.ModelVariable = vModelVariable; + UpdateTokenInfo(vResult, tModelVariable); + } + | tModelSubquery:Identifier EqualsSign vModelSubquery = subquery[SubDmlFlags.SelectNotForInsert, expressionFlags] + { + vResult.ModelSubquery = vModelSubquery; + } + ) + Comma tData:Identifier EqualsSign vDataSource = mergeTarget[false] + { + vResult.DataSource = vDataSource; + } + ( + Comma tRunTime:Identifier EqualsSign vRuntime = identifier + { + vResult.RunTime = vRuntime; + } + )? + ; predictWithClauseOpt [PredictTableReference vParent] - : (With) => - ( - (With LeftParenthesis predictSchemaItemList[vParent] tRParen:RightParenthesis - { - UpdateTokenInfo(vParent,tRParen); - } - ) - ) - ; + : (With) => + ( + (With LeftParenthesis predictSchemaItemList[vParent] tRParen:RightParenthesis + { + UpdateTokenInfo(vParent,tRParen); + } + ) + ) + ; predictSchemaItemList [PredictTableReference vParent] { - SchemaDeclarationItem vItem; + SchemaDeclarationItem vItem; } - : vItem = predictSchemaItem - { - AddAndUpdateTokenInfo(vParent, vParent.SchemaDeclarationItems, vItem); - } - (Comma vItem = predictSchemaItem - { - AddAndUpdateTokenInfo(vParent, vParent.SchemaDeclarationItems, vItem); - } - )* - ; + : vItem = predictSchemaItem + { + AddAndUpdateTokenInfo(vParent, vParent.SchemaDeclarationItems, vItem); + } + (Comma vItem = predictSchemaItem + { + AddAndUpdateTokenInfo(vParent, vParent.SchemaDeclarationItems, vItem); + } + )* + ; predictSchemaItem returns [SchemaDeclarationItem vResult = FragmentFactory.CreateFragment()] { - ValueExpression vMapping; - ColumnDefinitionBase vColumn; -} - : vColumn = columnDefinitionBasic - { - vResult.ColumnDefinition = vColumn; - } - (vMapping = stringLiteral - { - vResult.Mapping 
= vMapping; - } - )? - (As tPredict:Identifier - { - Match(tPredict, CodeGenerationSupporter.Predict); - } - )? - ; + ValueExpression vMapping; + ColumnDefinitionBase vColumn; +} + : vColumn = columnDefinitionBasic + { + vResult.ColumnDefinition = vColumn; + } + (vMapping = stringLiteral + { + vResult.Mapping = vMapping; + } + )? + (As tPredict:Identifier + { + Match(tPredict, CodeGenerationSupporter.Predict); + } + )? + ; mergeTarget[bool indexHintAllowed] returns [TableReferenceWithAlias vResult] { - Identifier vAlias; -} - : - vResult=dmlTarget[indexHintAllowed] - ( - ( - As vAlias = identifier - { - vResult.Alias = vAlias; - } - ) - | - {!NextTokenMatches(CodeGenerationSupporter.Using)}? - ( - vAlias = identifier - { - vResult.Alias = vAlias; - } - ) - | - /* empty */ - ) - ; + Identifier vAlias; +} + : + vResult=dmlTarget[indexHintAllowed] + ( + ( + As vAlias = identifier + { + vResult.Alias = vAlias; + } + ) + | + {!NextTokenMatches(CodeGenerationSupporter.Using)}? + ( + vAlias = identifier + { + vResult.Alias = vAlias; + } + ) + | + /* empty */ + ) + ; changeTableTableReference returns [TableReferenceWithAliasAndColumns vResult] { @@ -20001,20 +20354,20 @@ insertColumn returns [ColumnReferenceExpression vResult = FragmentFactory.Create openRowsetColumn returns [OpenRowsetColumnDefinition vResult = FragmentFactory.CreateFragment()] { ColumnDefinitionBase vColumn; - IntegerLiteral vColumnOrdinal; - StringLiteral vStringLiteral; + IntegerLiteral vColumnOrdinal; + StringLiteral vStringLiteral; } : vColumn = columnDefinitionBasic - { - vResult.ColumnIdentifier = vColumn.ColumnIdentifier; - vResult.DataType = vColumn.DataType; - vResult.Collation = vColumn.Collation; - } - (vColumnOrdinal=integer + { + vResult.ColumnIdentifier = vColumn.ColumnIdentifier; + vResult.DataType = vColumn.DataType; + vResult.Collation = vColumn.Collation; + } + (vColumnOrdinal=integer { vResult.ColumnOrdinal = vColumnOrdinal; } - | vStringLiteral=stringLiteral + | 
vStringLiteral=stringLiteral { vResult.JsonPath = vStringLiteral; })? @@ -20107,13 +20460,23 @@ schemaObjectTableDmlTarget [bool indexHintAllowed] returns [NamedTableReference )? ; -schemaObjectOrFunctionTableReference returns [TableReference vResult] +schemaObjectOrFunctionTableReference returns [TableReference vResult = null] { SchemaObjectName vSchemaObjectName; } : vSchemaObjectName=schemaObjectFourPartName ( + { + vSchemaObjectName.BaseIdentifier != null && + vSchemaObjectName.BaseIdentifier.Value.Equals(CodeGenerationSupporter.AiGenerateChunks, StringComparison.OrdinalIgnoreCase) && + vSchemaObjectName.BaseIdentifier.QuoteType == QuoteType.NotQuoted && + LT(2).getText().Equals(CodeGenerationSupporter.Source, StringComparison.OrdinalIgnoreCase) + }? + { + vResult = aiGenerateChunksTableReference(); + } + | {IsTableReference(false)}? vResult=schemaObjectTableReference[vSchemaObjectName] | vResult=schemaObjectFunctionTableReference[vSchemaObjectName] @@ -20649,7 +21012,7 @@ openRowsetCosmos returns [OpenRowsetCosmos vResult = FragmentFactory.CreateFragm { long encountered = 0; OpenRowsetCosmosOption vOption; - OpenRowsetColumnDefinition vColumn; + OpenRowsetColumnDefinition vColumn; } : vOption = openRowsetCosmosOptionHint @@ -20710,7 +21073,7 @@ openRowsetBulk returns [BulkOpenRowset vResult = FragmentFactory.CreateFragment< BulkInsertOption vOption; StringLiteral vDataFile; - OpenRowsetColumnDefinition vColumn; + OpenRowsetColumnDefinition vColumn; } : Bulk (( @@ -20745,10 +21108,10 @@ openRowsetBulk returns [BulkOpenRowset vResult = FragmentFactory.CreateFragment< tRParen:RightParenthesis { CheckForDataFileFormatProhibitedOptionsInOpenRowsetBulk(encountered, vDataFile); - CheckForParquetFormatProhibitedOptionsInOpenRowsetBulk(encountered, vResult); + CheckForParquetFormatProhibitedOptionsInOpenRowsetBulk(encountered, vResult); UpdateTokenInfo(vResult,tRParen); } - (tWith:With + (tWith:With { UpdateTokenInfo(vResult,tWith); } @@ -21384,23 +21747,23 @@ 
simpleGroupByItem [ref bool alreadyEncounteredDistributedAggHint] returns [Expre { vResult.Expression = vExpression; } - ( - // Greedy due to conflict with withCommonTableExpressionsAndXmlNamespaces - options { greedy = true; } : - With LeftParenthesis tDistributedAgg:Identifier tRParen:RightParenthesis - { - Match(tDistributedAgg, CodeGenerationSupporter.DistributedAgg); + ( + // Greedy due to conflict with withCommonTableExpressionsAndXmlNamespaces + options { greedy = true; } : + With LeftParenthesis tDistributedAgg:Identifier tRParen:RightParenthesis + { + Match(tDistributedAgg, CodeGenerationSupporter.DistributedAgg); - if (alreadyEncounteredDistributedAggHint) - ThrowParseErrorException("SQL46129", tDistributedAgg, TSqlParserResource.SQL46129Message); + if (alreadyEncounteredDistributedAggHint) + ThrowParseErrorException("SQL46129", tDistributedAgg, TSqlParserResource.SQL46129Message); - vResult.DistributedAggregation = true; - UpdateTokenInfo(vResult, tDistributedAgg); - UpdateTokenInfo(vResult, tRParen); + vResult.DistributedAggregation = true; + UpdateTokenInfo(vResult, tDistributedAgg); + UpdateTokenInfo(vResult, tRParen); - alreadyEncounteredDistributedAggHint = true; - } - )? + alreadyEncounteredDistributedAggHint = true; + } + )? ; // End of Group By clause @@ -23124,9 +23487,9 @@ createColumnMasterKeyStatement returns [CreateColumnMasterKeyStatement vResult = columnMasterkeyParameter returns [ColumnMasterKeyParameter vResult] : {NextTokenMatches(CodeGenerationSupporter.KeyStoreProviderName)}? vResult = columnMasterKeyStoreProviderNameParameter - | {NextTokenMatches(CodeGenerationSupporter.KeyPath)}? + | {NextTokenMatches(CodeGenerationSupporter.KeyPath)}? vResult = columnMasterKeyPathParameter - | {NextTokenMatches(CodeGenerationSupporter.EnclaveComputations)}? + | {NextTokenMatches(CodeGenerationSupporter.EnclaveComputations)}? 
vResult = columnMasterKeyEnclaveComputationsParameter ; @@ -23154,14 +23517,14 @@ columnMasterKeyPathParameter returns [ColumnMasterKeyPathParameter vResult = Fra } ; - columnMasterKeyEnclaveComputationsParameter returns [ColumnMasterKeyEnclaveComputationsParameter vResult = FragmentFactory.CreateFragment()] + columnMasterKeyEnclaveComputationsParameter returns [ColumnMasterKeyEnclaveComputationsParameter vResult = FragmentFactory.CreateFragment()] { BinaryLiteral vSignature; } : tEnclaveComputations:Identifier tLeftParens:LeftParenthesis tSignature:Identifier tEquals3:EqualsSign vSignature=binary tRightParens:RightParenthesis { Match(tEnclaveComputations, CodeGenerationSupporter.EnclaveComputations); - Match(tSignature, CodeGenerationSupporter.Signature); + Match(tSignature, CodeGenerationSupporter.Signature); vResult.ParameterKind = ColumnMasterKeyParameterKind.Signature; vResult.Signature = vSignature; } @@ -23606,6 +23969,245 @@ dropSecurityPolicyStatement returns [DropSecurityPolicyStatement vResult = Fragm } ; +createExternalModelStatement returns [CreateExternalModelStatement vResult = FragmentFactory.CreateFragment()] +{ + Identifier vName; +} + : tModel:Identifier vName = identifier + { + Match(tModel, CodeGenerationSupporter.Model); + vResult.Name = vName; + ThrowPartialAstIfPhaseOne(vResult); + } + authorizationOpt[vResult] + tWith:With LeftParenthesis + ( + {NextTokenMatches(CodeGenerationSupporter.Location)}? + externalModelLocation[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ApiFormat)}? + externalModelApiFormat[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ModelType)}? + externalModelModelType[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ModelName)}? + externalModelModelName[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.Credential)}? + externalModelCredential[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.LocalRuntimePath)}? 
+ externalModelLocalRuntimePath[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.Parameters)}? + externalModelParameters[vResult] + ) + + ( + tComma:Comma + ( + {NextTokenMatches(CodeGenerationSupporter.Location)}? + externalModelLocation[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ApiFormat)}? + externalModelApiFormat[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ModelType)}? + externalModelModelType[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ModelName)}? + externalModelModelName[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.Credential)}? + externalModelCredential[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.LocalRuntimePath)}? + externalModelLocalRuntimePath[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.Parameters)}? + externalModelParameters[vResult] + ) + )* + + tRParen:RightParenthesis + { + UpdateTokenInfo(vResult,tRParen); + } + ; + +externalModelLocation[ExternalModelStatement vParent] +{ + StringLiteral vLocation; +} + : + tLocation:Identifier EqualsSign vLocation = stringLiteral + { + Match(tLocation, CodeGenerationSupporter.Location); + vParent.Location = vLocation; + } + ; + +externalModelApiFormat[ExternalModelStatement vParent] +{ +StringLiteral vApiFormat; +} +: + tApiFormat:Identifier EqualsSign vApiFormat = stringLiteral + { + Match(tApiFormat, CodeGenerationSupporter.ApiFormat); + vParent.ApiFormat = vApiFormat; + } +; + +externalModelModelType[ExternalModelStatement vParent] + : + tModelType:Identifier + { + Match(tModelType, CodeGenerationSupporter.ModelType); + UpdateTokenInfo(vParent, tModelType); + } + EqualsSign + ( + tEmbeddings:Identifier + { + if (TryMatch(tEmbeddings, CodeGenerationSupporter.Embeddings)) + { + vParent.ModelType = ExternalModelTypeOption.EMBEDDINGS; + UpdateTokenInfo(vParent, tEmbeddings); + } + else + { + ThrowIncorrectSyntaxErrorException(tEmbeddings); + } + } + ) + ; + +externalModelModelName[ExternalModelStatement vParent] +{ + 
StringLiteral vModelName; +} + : + tModelName:Identifier EqualsSign vModelName = stringLiteral + { + Match(tModelName, CodeGenerationSupporter.ModelName); + vParent.ModelName = vModelName; + } + ; + +externalModelCredential[ExternalModelStatement vParent] +{ +Identifier vCredential; +} +: + tCredential:Identifier EqualsSign vCredential = identifier + { + Match(tCredential, CodeGenerationSupporter.Credential); + vParent.Credential = vCredential; + } +; +externalModelLocalRuntimePath[ExternalModelStatement vParent] +{ +StringLiteral vLocalRuntimePath; +} + : + tLocalRuntimePath:Identifier EqualsSign vLocalRuntimePath = stringLiteral + { + Match(tLocalRuntimePath, CodeGenerationSupporter.LocalRuntimePath); + vParent.LocalRuntimePath = vLocalRuntimePath; + } + ; +externalModelParameters[ExternalModelStatement vParent] +{ + StringLiteral vParameters; +} + : + tParameters:Identifier EqualsSign vParameters = stringLiteral + { + Match(tParameters, CodeGenerationSupporter.Parameters); + vParent.Parameters = vParameters; + } + ; + +alterExternalModelStatement returns [AlterExternalModelStatement vResult = FragmentFactory.CreateFragment()] +{ + Identifier vName; +} + : tModel:Identifier vName = identifier + { + Match(tModel, CodeGenerationSupporter.Model); + vResult.Name = vName; + ThrowPartialAstIfPhaseOne(vResult); + } + tSet:Set LeftParenthesis + ( + {NextTokenMatches(CodeGenerationSupporter.Location)}? + externalModelLocation[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ApiFormat)}? + externalModelApiFormat[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ModelType)}? + externalModelModelType[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ModelName)}? + externalModelModelName[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.Credential)}? + externalModelCredential[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.LocalRuntimePath)}? 
+ externalModelLocalRuntimePath[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.Parameters)}? + externalModelParameters[vResult] + ) + + ( + tComma:Comma + ( + {NextTokenMatches(CodeGenerationSupporter.Location)}? + externalModelLocation[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ApiFormat)}? + externalModelApiFormat[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ModelType)}? + externalModelModelType[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.ModelName)}? + externalModelModelName[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.Credential)}? + externalModelCredential[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.LocalRuntimePath)}? + externalModelLocalRuntimePath[vResult] + | + {NextTokenMatches(CodeGenerationSupporter.Parameters)}? + externalModelParameters[vResult] + ) + )* + + tRParen:RightParenthesis + { + UpdateTokenInfo(vResult,tRParen); + } + ; + +dropExternalModelStatement returns [DropExternalModelStatement vResult = FragmentFactory.CreateFragment()] +{ + Identifier vName; +} + : tModel:Identifier vName = identifier + { + Match(tModel, CodeGenerationSupporter.Model); + vResult.Name = vName; + ThrowPartialAstIfPhaseOne(vResult); + } + ; + createExternalDataSourceStatement returns [CreateExternalDataSourceStatement vResult = FragmentFactory.CreateFragment()] { Identifier vName; @@ -23695,11 +24297,11 @@ externalDataSourceType[CreateExternalDataSourceStatement vParent] UpdateTokenInfo(vParent, tExternalDataSourceType); vParent.DataSourceType = ExternalDataSourceType.BLOB_STORAGE; } - else - { - UpdateTokenInfo(vParent, tExternalDataSourceType); + else + { + UpdateTokenInfo(vParent, tExternalDataSourceType); vParent.DataSourceType = ExternalDataSourceType.EXTERNAL_GENERICS; - } + } } ) ; @@ -23869,7 +24471,7 @@ createExternalStreamStatement returns [CreateExternalStreamStatement vResult = F {NextTokenMatches(CodeGenerationSupporter.Location)}? 
externalStreamLocation[vResult] | - {NextTokenMatches(CodeGenerationSupporter.InputOptions)}? + {NextTokenMatches(CodeGenerationSupporter.InputOptions)}? externalStreamInputOptions[vResult] | {NextTokenMatches(CodeGenerationSupporter.OutputOptions)}? @@ -27566,6 +28168,27 @@ xmlCompressionOption returns [XmlCompressionOption vResult = FragmentFactory.Cre )? ; +vectorMetricOption returns [VectorMetricIndexOption vResult = FragmentFactory.CreateFragment()] + : tMetric:Identifier EqualsSign tMetricValue:AsciiStringLiteral + { + Match(tMetric, CodeGenerationSupporter.Metric); + vResult.OptionKind = IndexOptionKind.VectorMetric; + vResult.MetricType = VectorMetricTypeHelper.Instance.ParseOption(tMetricValue); + + UpdateTokenInfo(vResult, tMetric); + } + ; + +vectorTypeOption returns [VectorTypeIndexOption vResult = FragmentFactory.CreateFragment()] + : tType:Identifier EqualsSign tTypeValue:AsciiStringLiteral + { + Match(tType, CodeGenerationSupporter.Type); + vResult.OptionKind = IndexOptionKind.VectorType; + vResult.VectorType = VectorIndexTypeHelper.Instance.ParseOption(tTypeValue); + UpdateTokenInfo(vResult, tType); + } + ; + compressionPartitionRange returns [CompressionPartitionRange vResult = FragmentFactory.CreateFragment()] { ScalarExpression vExpression; @@ -29215,11 +29838,11 @@ generatedAlwaysClause [ColumnDefinition vResult] } else if (TryMatch(tGeneratedType, CodeGenerationSupporter.TransactionId)) { - vResult.GeneratedAlways = GeneratedAlwaysType.TransactionIdStart; + vResult.GeneratedAlways = GeneratedAlwaysType.TransactionIdStart; } else if (TryMatch(tGeneratedType, CodeGenerationSupporter.SequenceNumber)) - { - vResult.GeneratedAlways = GeneratedAlwaysType.SequenceNumberStart; + { + vResult.GeneratedAlways = GeneratedAlwaysType.SequenceNumberStart; } else { @@ -29683,7 +30306,7 @@ uniqueTableConstraint [IndexAffectingStatement statementType] returns [UniqueCon ( uniqueConstraintEnforcement[vResult] | - uniqueConstraintTailOpt[statementType, vResult] + 
uniqueConstraintTailOpt[statementType, vResult] ) ; @@ -30097,6 +30720,32 @@ xmlDataType [SchemaObjectName vName] returns [XmlDataTypeReference vResult = Fra )? ; +vectorDataType [SchemaObjectName vName] returns [VectorDataTypeReference vResult = FragmentFactory.CreateFragment()] +{ + vResult.Name = vName; + vResult.UpdateTokenInfo(vName); + + IntegerLiteral vDimension = null; + Identifier vBaseType = null; +} + : + ( LeftParenthesis vDimension=integer + { + vResult.Dimension = vDimension; + } + ( + Comma vBaseType=identifier + { + vResult.BaseType = vBaseType; + } + )? + tRParen:RightParenthesis + { + UpdateTokenInfo(vResult,tRParen); + } + ) + ; + scalarDataType returns [DataTypeReference vResult = null] { SchemaObjectName vName; @@ -30117,6 +30766,9 @@ scalarDataType returns [DataTypeReference vResult = null] ( {isXmlDataType}? vResult = xmlDataType[vName] + | + {typeOption == SqlDataTypeOption.Vector}? + vResult = vectorDataType[vName] | {typeOption != SqlDataTypeOption.None}? vResult = sqlDataTypeWithoutNational[vName, typeOption] @@ -30519,6 +31171,9 @@ booleanExpressionPrimary [ExpressionFlags expressionFlags] returns [BooleanExpre vResult = vMatchPredicate; UpdateTokenInfo(vResult,tRParen); } + | + {NextTokenMatches(CodeGenerationSupporter.RegexpLike)}? + vResult=regexpLikePredicate | vExpressionFirst=expressionWithFlags[expressionFlags] ( @@ -30900,6 +31555,29 @@ tsEqualCall returns [TSEqualCall vResult = this.FragmentFactory.CreateFragment()] +{ + ScalarExpression vText; + ScalarExpression vPattern; + ScalarExpression vFlags = null; +} + : tRegexp:Identifier LeftParenthesis vText=expression Comma vPattern=expression + { + UpdateTokenInfo(vResult,tRegexp); + vResult.Text = vText; + vResult.Pattern = vPattern; + } + (Comma vFlags=expression + )? 
+ { + vResult.Flags = vFlags; + } + tRParen:RightParenthesis + { + UpdateTokenInfo(vResult,tRParen); + } + ; + updateCall returns [UpdateCall vResult = this.FragmentFactory.CreateFragment()] { Identifier vIdentifier; @@ -31451,6 +32129,15 @@ expressionPrimary [ExpressionFlags expressionFlags] returns [PrimaryExpression v | {NextTokenMatches(CodeGenerationSupporter.IIf) && (LA(2) == LeftParenthesis)}? vResult=iIfCall + | + {NextTokenMatches(CodeGenerationSupporter.AIGenerateEmbeddings) && LA(2) == LeftParenthesis}? + vResult = aiGenerateEmbeddingsFunctionCall + | + {NextTokenMatches(CodeGenerationSupporter.JsonObject) && (LA(2) == LeftParenthesis)}? + vResult=jsonObjectCall + | + {NextTokenMatches(CodeGenerationSupporter.JsonArray) && (LA(2) == LeftParenthesis)}? + vResult=jsonArrayCall | (Identifier LeftParenthesis)=> vResult=builtInFunctionCall @@ -31485,6 +32172,134 @@ expressionPrimary [ExpressionFlags expressionFlags] returns [PrimaryExpression v collationOpt[vResult] ; +aiGenerateEmbeddingsFunctionCall + returns [AIGenerateEmbeddingsFunctionCall vResult = this.FragmentFactory.CreateFragment()] +{ + ScalarExpression vInput; + SchemaObjectName vModelName; + ScalarExpression vParams = null; + ScalarExpression vParamsInner; +} + : + tFunc:Identifier LeftParenthesis + { + Match(tFunc, CodeGenerationSupporter.AIGenerateEmbeddings); + UpdateTokenInfo(vResult, tFunc); + } + vInput=expression + { + vResult.Input = vInput; + } + tUse:Use + { + UpdateTokenInfo(vResult, tUse); + } + + tModel:Identifier + { + Match(tModel, CodeGenerationSupporter.Model); + UpdateTokenInfo(vResult, tModel); + } + + // --- MODEL NAME: single-part only (strict) --------------------------------------------- + // We accept exactly **one identifier token** after "USE MODEL". + // + // Why: + // - Users may store model names that *visually* contain dots or spaces, e.g. [dbo.MyDefaultModel]. + // When bracket-delimited, the lexer emits this as a **single** token (QuotedIdentifier), so it's OK. 
+ // - True multipart names (db.schema.model) must be rejected here, so we do NOT consume any Dot tokens. + // + // Allowed (single token): + // USE MODEL MyDefaultModel -- Identifier + // USE MODEL [dbo.MyDefaultModel] -- QuotedIdentifier (one token; dot lives inside the brackets) + // + // Rejected (multipart): + // USE MODEL dbo.MyDefaultModel -- Identifier '.' Identifier (two tokens + Dot) + // USE MODEL [dbo].[MyDefaultModel] -- QuotedIdentifier '.' QuotedIdentifier + // + // Token notes: + // - Identifier : unquoted identifier; cannot contain spaces or '.'. + // - QuotedIdentifier : bracket-delimited; may contain spaces and '.' inside the brackets. + ( + vModelId:Identifier + { + vModelName = this.FragmentFactory.CreateFragment(); + vModelName.Identifiers.Add(this.CreateIdentifierFromToken(vModelId)); + vResult.ModelName = vModelName; + } + | + vModelQId:QuotedIdentifier + { + vModelName = this.FragmentFactory.CreateFragment(); + vModelName.Identifiers.Add(this.CreateIdentifierFromToken(vModelQId)); + vResult.ModelName = vModelName; + } + ) + + // --- Optional PARAMETERS clause --------------------------------------------------------- + // Shape: [PARAMETERS ()] | [PARAMETERS ] + // Goals: + // 1) Accept a general **expression** as the PARAMETERS value. + // 2) Preserve user-written parentheses by constructing a ParenthesisExpression node + // for the "( )" variant so pretty-printing round-trips exactly. + // 3) **Reject** a bare JSON string literal (e.g., PARAMETERS '{...}'); callers must pass + // a parsed JSON expression (e.g., TRY_CONVERT(JSON, N'{}')). + // Notes: + // - Some builds tokenize PARAMETERS as a keyword; others as an Identifier. Support both. + ( + ( + // PARAMETERS as a true keyword token. + tParamsKw:Parameters + { + UpdateTokenInfo(vResult, tParamsKw); + } + | + // PARAMETERS as an identifier token; enforce its text equals "Parameters". 
+ tParams:Identifier + { + Match(tParams, CodeGenerationSupporter.Parameters); + UpdateTokenInfo(vResult, tParams); + } + ) + + // ---- Value of PARAMETERS ----------------------------------------------------------- + ( + // Variant A: user wrote parentheses around the value: PARAMETERS ( ) + // Build a ParenthesisExpression so the printer re-emits parens. + tLP:LeftParenthesis + vParamsInner=expression + tRP:RightParenthesis + { + ParenthesisExpression p = this.FragmentFactory.CreateFragment(); + p.Expression = vParamsInner; + vParams = p; + + // Attach LP/RP token info for accurate script generation. + UpdateTokenInfo(p, tLP); + UpdateTokenInfo(p, tRP); + } + | + // Variant B: bare expression without surrounding parentheses. + // Guardrail: Disallow a leading string literal so that + // PARAMETERS '{"dimensions":768}' + // is a **syntax error**, while + // PARAMETERS TRY_CONVERT(JSON, N'{}') + // is allowed. + // LA(1) is the next token type; block both ASCII ('...') and Unicode (N'...') strings. + { LA(1) != AsciiStringLiteral && LA(1) != UnicodeStringLiteral }? + vParams=expression + ) + { + vResult.OptionalParameters = vParams; + } + )? 
+ + tRParen:RightParenthesis + { + UpdateTokenInfo(vResult, tRParen); + } + ; + parenthesisDisambiguatorForExpressions [ExpressionFlags expressionFlags] returns [PrimaryExpression vResult] : @@ -31784,6 +32599,36 @@ expressionList [TSqlFragment vParent, IList expressions] | /* empty */ ) + ( + jsonReturningClause[vParent] + | + /* empty */ + ) + ; + + jsonObjectAggExpressionList [FunctionCall vParent] + { + JsonKeyValue vExpression; + } + : + ( + vExpression=jsonKeyValueExpression + { + AddAndUpdateTokenInfo(vParent, vParent.JsonParameters, vExpression); + } + | + /* empty */ + ) + ( + jsonNullClauseFunction[vParent] + | + /* empty */ + ) + ( + jsonReturningClause[vParent] + | + /* empty */ + ) ; jsonNullClauseFunction [FunctionCall vParent] @@ -31792,7 +32637,7 @@ expressionList [TSqlFragment vParent, IList expressions] Identifier vAbsent; } : - ( + ( Null On Null { vNull = FragmentFactory.CreateFragment(); @@ -31810,24 +32655,124 @@ expressionList [TSqlFragment vParent, IList expressions] } ) ; - + +/* jsonReturningClause is used by json_object, json_objectagg, json_array, json_arrayagg where only + RETURNING JSON is supported. 
Any other type with JSON should return in error */ +jsonReturningClause [FunctionCall vParent] +{ + DataTypeReference vDataType; +} +: + tReturning:Identifier vDataType=jsonDataType + { + Match(tReturning, CodeGenerationSupporter.Returning); + UpdateTokenInfo(vParent, tReturning); + vParent.ReturnType.Add(vDataType); + } +; + +jsonDataType returns [SqlDataTypeReference vResult = null] +{ + SchemaObjectName vJsonTypeName; +} +: + vJsonTypeName=schemaObjectTwoPartName + { + // Only allow JSON as the data type + if (vJsonTypeName.BaseIdentifier.Value.ToUpper(CultureInfo.InvariantCulture) != CodeGenerationSupporter.Json) + { + ThrowParseErrorException("SQL46005", vJsonTypeName, + TSqlParserResource.SQL46005Message, CodeGenerationSupporter.Json, vJsonTypeName.BaseIdentifier.Value); + } + + vResult = FragmentFactory.CreateFragment(); + vResult.Name = vJsonTypeName; + vResult.SqlDataTypeOption = SqlDataTypeOption.Json; + vResult.UpdateTokenInfo(vJsonTypeName); + } +; + +/* jsonValueReturningClause is used by json_value. 
Only json_value support RETURNING syntax*/ +jsonValueReturningClause [FunctionCall vParent] +{ + DataTypeReference vDataType; +} +: + tReturning:Identifier vDataType=scalarDataType + { + Match(tReturning, CodeGenerationSupporter.Returning); + UpdateTokenInfo(vParent, tReturning); + + // JSON_VALUE only supports specific SQL data types in RETURNING clause + if (vDataType is SqlDataTypeReference sqlDataType) + { + bool isAllowedType = sqlDataType.SqlDataTypeOption == SqlDataTypeOption.Int || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.TinyInt || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.SmallInt || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.BigInt || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.Float || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.Real || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.Decimal || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.Numeric || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.Bit || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.VarChar || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.NVarChar || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.Char || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.NChar || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.Date || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.Time || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.DateTime2 || + sqlDataType.SqlDataTypeOption == SqlDataTypeOption.DateTimeOffset; + + if (!isAllowedType) + { + ThrowParseErrorException("SQL46005", vDataType, + TSqlParserResource.SQL46005Message, "supported data type", sqlDataType.SqlDataTypeOption.ToString()); + } + } + + vParent.ReturnType.Add(vDataType); + } +; + + jsonKeyValueExpression returns [JsonKeyValue vResult = FragmentFactory.CreateFragment()] { ScalarExpression vKey; ScalarExpression vValue; + MultiPartIdentifier vMultiPartIdentifier = null; } - : + : ( + ((Identifier Dot)? 
Label)=> + (vMultiPartIdentifier=multiPartIdentifier[2] Dot)? label:Label + { + var identifier = this.FragmentFactory.CreateFragment(); + if (vMultiPartIdentifier == null) + { + vMultiPartIdentifier = this.FragmentFactory.CreateFragment(); + } + var columnRef = this.FragmentFactory.CreateFragment(); + CreateIdentifierFromLabel(label, identifier, vMultiPartIdentifier); + + columnRef.MultiPartIdentifier = vMultiPartIdentifier; + vResult.JsonKeyName=columnRef; + } + vValue=expression + { + vResult.JsonValue=vValue; + } + | vKey=expression - { - vResult.JsonKeyName=vKey; - } + { + vResult.JsonKeyName=vKey; + } Colon vValue=expression - { - vResult.JsonValue=vValue; + { + vResult.JsonValue=vValue; } - ) - ; + ) + ; windowClause returns [WindowClause vResult = FragmentFactory.CreateFragment()] { @@ -32106,6 +33051,18 @@ builtInFunctionCall returns [FunctionCall vResult = FragmentFactory.CreateFragme | {(vResult.FunctionName != null && vResult.FunctionName.Value.ToUpper(CultureInfo.InvariantCulture) == CodeGenerationSupporter.JsonObject)}? jsonObjectBuiltInFunctionCall[vResult] + | + {(vResult.FunctionName != null && vResult.FunctionName.Value.ToUpper(CultureInfo.InvariantCulture) == CodeGenerationSupporter.JsonObjectAgg)}? + jsonObjectAggBuiltInFunctionCall[vResult] + | + {(vResult.FunctionName != null && vResult.FunctionName.Value.ToUpper(CultureInfo.InvariantCulture) == CodeGenerationSupporter.JsonArrayAgg)}? + jsonArrayAggBuiltInFunctionCall[vResult] + | + {(vResult.FunctionName != null && vResult.FunctionName.Value.ToUpper(CultureInfo.InvariantCulture) == CodeGenerationSupporter.JsonQuery)}? + jsonQueryBuiltInFunctionCall[vResult] + | + {(vResult.FunctionName != null && vResult.FunctionName.Value.ToUpper(CultureInfo.InvariantCulture) == CodeGenerationSupporter.JsonValue)}? 
+ jsonValueBuiltInFunctionCall[vResult] | {(vResult.FunctionName != null && vResult.FunctionName.Value.ToUpper(CultureInfo.InvariantCulture) == CodeGenerationSupporter.Trim) && (NextTokenMatches(CodeGenerationSupporter.Leading) | NextTokenMatches(CodeGenerationSupporter.Trailing) | NextTokenMatches(CodeGenerationSupporter.Both))}? @@ -32139,10 +33096,57 @@ jsonArrayBuiltInFunctionCall [FunctionCall vParent] | /* empty */ ) + ( + jsonReturningClause[vParent] + | + /* empty */ + ) + tRParen:RightParenthesis + { + UpdateTokenInfo(vParent, tRParen); + } + ; + +jsonArrayAggBuiltInFunctionCall [FunctionCall vParent] +{ + ScalarExpression vExpression; + OrderByClause vOrderByClause; + OverClause vOverClause; +} + : ( + vExpression=expression + { + AddAndUpdateTokenInfo(vParent, vParent.Parameters, vExpression); + } + ) + ( + vOrderByClause=orderByClause + { + vParent.JsonOrderByClause = vOrderByClause; + } + | + /* empty */ + ) + ( + jsonNullClauseFunction[vParent] + | + /* empty */ + ) + ( + jsonReturningClause[vParent] + | + /* empty */ + ) tRParen:RightParenthesis { UpdateTokenInfo(vParent, tRParen); } + ( + vOverClause=overClauseNoOrderBy + { + vParent.OverClause = vOverClause; + } + )? ; jsonObjectBuiltInFunctionCall [FunctionCall vParent] @@ -32161,6 +33165,79 @@ jsonObjectBuiltInFunctionCall [FunctionCall vParent] } ; +jsonObjectAggBuiltInFunctionCall [FunctionCall vParent] +{ +} + : ( + jsonObjectAggExpressionList[vParent] + | + /* empty */ + ) + tRParen:RightParenthesis + { + UpdateTokenInfo(vParent, tRParen); + } + ; + +jsonQueryBuiltInFunctionCall [FunctionCall vParent] +{ + ScalarExpression vExpression; + ScalarExpression vPath; +} + : vExpression=expression + { + AddAndUpdateTokenInfo(vParent, vParent.Parameters, vExpression); + } + ( + Comma vPath=expression + { + AddAndUpdateTokenInfo(vParent, vParent.Parameters, vPath); + } + )? 
+ ( + With tArray:Identifier tWrapper:Identifier + { + if (!tArray.getText().Equals(CodeGenerationSupporter.Array, StringComparison.OrdinalIgnoreCase)) + { + throw GetUnexpectedTokenErrorException(tArray); + } + if (!tWrapper.getText().Equals(CodeGenerationSupporter.Wrapper, StringComparison.OrdinalIgnoreCase)) + { + throw GetUnexpectedTokenErrorException(tWrapper); + } + vParent.WithArrayWrapper = true; + } + )? + tRParen:RightParenthesis + { + UpdateTokenInfo(vParent, tRParen); + } + ; + +jsonValueBuiltInFunctionCall [FunctionCall vParent] +{ + ScalarExpression vExpression; + ScalarExpression vPath; +} + : vExpression=expression + { + AddAndUpdateTokenInfo(vParent, vParent.Parameters, vExpression); + } + Comma vPath=expression + { + AddAndUpdateTokenInfo(vParent, vParent.Parameters, vPath); + } + ( + jsonValueReturningClause[vParent] + | + /* empty */ + ) + tRParen:RightParenthesis + { + UpdateTokenInfo(vParent, tRParen); + } + ; + regularBuiltInFunctionCall [FunctionCall vParent] { ColumnReferenceExpression vColumn; @@ -32224,19 +33301,19 @@ ignoreRespectNulls [FunctionCall vParent] Identifier vNulls; } : - tIgnoreOrRespect:Identifier + tIgnoreOrRespect:Identifier { - Match(tIgnoreOrRespect, CodeGenerationSupporter.Ignore, CodeGenerationSupporter.Respect); + Match(tIgnoreOrRespect, CodeGenerationSupporter.Ignore, CodeGenerationSupporter.Respect); vIgnoreOrRespect = FragmentFactory.CreateFragment(); - AddAndUpdateTokenInfo(vParent, vParent.IgnoreRespectNulls, vIgnoreOrRespect); - vIgnoreOrRespect.SetUnquotedIdentifier(tIgnoreOrRespect.getText()); + AddAndUpdateTokenInfo(vParent, vParent.IgnoreRespectNulls, vIgnoreOrRespect); + vIgnoreOrRespect.SetUnquotedIdentifier(tIgnoreOrRespect.getText()); } tNulls:Identifier { - Match(tNulls, CodeGenerationSupporter.Nulls); + Match(tNulls, CodeGenerationSupporter.Nulls); vNulls = FragmentFactory.CreateFragment(); - AddAndUpdateTokenInfo(vParent, vParent.IgnoreRespectNulls, vNulls); - 
vNulls.SetUnquotedIdentifier(tNulls.getText()); + AddAndUpdateTokenInfo(vParent, vParent.IgnoreRespectNulls, vNulls); + vNulls.SetUnquotedIdentifier(tNulls.getText()); } ; @@ -32455,6 +33532,32 @@ iIfCall returns [IIfCall vResult = this.FragmentFactory.CreateFragment( } ; +jsonObjectCall returns [FunctionCall vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; +} + : vIdentifier=nonQuotedIdentifier + { + Match(vIdentifier, CodeGenerationSupporter.JsonObject); + vResult.FunctionName = vIdentifier; + } + LeftParenthesis + jsonObjectBuiltInFunctionCall[vResult] + ; + +jsonArrayCall returns [FunctionCall vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; +} + : vIdentifier=nonQuotedIdentifier + { + Match(vIdentifier, CodeGenerationSupporter.JsonArray); + vResult.FunctionName = vIdentifier; + } + LeftParenthesis + jsonArrayBuiltInFunctionCall[vResult] + ; + coalesceExpression [ExpressionFlags expressionFlags] returns [CoalesceExpression vResult = this.FragmentFactory.CreateFragment()] { ScalarExpression vExpression; @@ -32880,6 +33983,24 @@ signedIntegerOrVariableOrNull returns [ScalarExpression vResult] | vResult=nullLiteral ; +signedIntegerOrVariableOrColumnReference returns [ScalarExpression vResult] + : vResult=signedInteger + | vResult=variable + | vResult=vectorSearchColumnReferenceExpression + ; + +vectorSearchColumnReferenceExpression returns [ColumnReferenceExpression vResult = this.FragmentFactory.CreateFragment()] +{ + MultiPartIdentifier vMultiPartIdentifier; +} + : + vMultiPartIdentifier=multiPartIdentifier[4] + { + vResult.ColumnType = ColumnType.Regular; + vResult.MultiPartIdentifier = vMultiPartIdentifier; + } + ; + stringLiteralOrNull returns [Literal vResult] : vResult=stringLiteral | vResult=nullLiteral @@ -33302,9 +34423,9 @@ nonEmptyString returns [StringLiteral vResult] } ; - defaultValueLiteral returns [ScalarExpression vResult] - : vResult = literal - | vResult = signedIntegerOrReal; + 
defaultValueLiteral returns [ScalarExpression vResult] + : vResult = literal + | vResult = signedIntegerOrReal; stringLiteral returns [StringLiteral vResult = this.FragmentFactory.CreateFragment()] : tAsciiStringLiteral:AsciiStringLiteral @@ -33566,6 +34687,7 @@ securityStatementPermission returns [Identifier vResult = this.FragmentFactory.C } : tId:Identifier + | AiGenerateEmbeddings | Add | All | Alter diff --git a/SqlScriptDom/Parser/TSql/TSql170ParserBaseInternal.cs b/SqlScriptDom/Parser/TSql/TSql170ParserBaseInternal.cs index 5c332e5..9b5146b 100644 --- a/SqlScriptDom/Parser/TSql/TSql170ParserBaseInternal.cs +++ b/SqlScriptDom/Parser/TSql/TSql170ParserBaseInternal.cs @@ -46,5 +46,68 @@ public TSql170ParserBaseInternal(bool initialQuotedIdentifiersOn) } #endregion + + /// + /// Parses security object kind with support for External Model (TSql170+) + /// + /// The first identifier. + /// The second identifier. + /// The security object kind. + protected SecurityObjectKind ParseSecurityObjectKindTSql170(Identifier identifier1, Identifier identifier2) + { + if (identifier1 == null) + { + throw new ArgumentNullException(nameof(identifier1)); + } + + switch (identifier1.Value.ToUpperInvariant()) + { + case CodeGenerationSupporter.External: + Match(identifier2, CodeGenerationSupporter.Model); + return SecurityObjectKind.ExternalModel; + default: + // Fall back to the base class implementation for all other cases + return TSql160ParserBaseInternal.ParseSecurityObjectKind(identifier1, identifier2); + } + } + + /// + /// Checks if VECTOR keyword appears in the upcoming tokens within a reasonable lookahead window. + /// Used to determine if SaveGuessing optimization is needed for VECTOR data type parsing. 
+ /// + /// true if VECTOR keyword found in lookahead; false otherwise + protected bool ContainsVectorInLookahead() + { + // Scan ahead looking for VECTOR keyword (case-insensitive identifier match) + + const int LookaheadLimit = 100; // Define a named constant for the lookahead limit + // We scan up to LookaheadLimit tokens to handle deeply nested JOIN structures with VECTOR types + for (int i = 1; i <= LookaheadLimit; i++) + { + IToken token; + try + { + token = LT(i); + } + catch (Exception ex) + { + Debug.WriteLine($"Error accessing token at lookahead index {i}: {ex.Message}"); + break; + } + if (token == null || token.Type == Token.EOF_TYPE) + { + break; + } + + // Check if this is an identifier token with text "VECTOR" + if (token.Type == TSql170ParserInternal.Identifier && + string.Equals(token.getText(), CodeGenerationSupporter.Vector, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; + } } } diff --git a/SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs b/SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs index 120adcc..4f47429 100644 --- a/SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs +++ b/SqlScriptDom/Parser/TSql/TSql80ParserBaseInternal.cs @@ -263,6 +263,25 @@ internal static void UpdateTokenInfo(TSqlFragment fragment, antlr.IToken token) fragment.UpdateTokenInfo(tokenIndex, tokenIndex); } + /// + /// Creates an identifier from a label token and adds it to the multipart identifier. + /// + /// + /// + /// + internal static void CreateIdentifierFromLabel(antlr.IToken token, Identifier identifier, MultiPartIdentifier multiPartIdentifier) + { + var tokenText = token?.getText(); + if (string.IsNullOrEmpty(tokenText)) + { + throw GetUnexpectedTokenErrorException(token); + } + var identifierName = tokenText?.EndsWith(":") == true ? 
tokenText.Substring(0, tokenText.Length - 1) : tokenText; + identifier.SetIdentifier(identifierName); + UpdateTokenInfo(identifier, token); + AddAndUpdateTokenInfo(multiPartIdentifier, multiPartIdentifier.Identifiers, identifier); + } + protected static void AddAndUpdateTokenInfo(TSqlFragment node, IList collection, TFragmentType item) where TFragmentType : TSqlFragment { @@ -273,8 +292,9 @@ protected static void AddAndUpdateTokenInfo(TSqlFragment node, IL protected static void AddAndUpdateTokenInfo(TSqlFragment node, IList collection, IList otherCollection) where TFragmentType : TSqlFragment { - foreach (TFragmentType item in otherCollection) + for (int i = 0; i < otherCollection.Count; i++) { + TFragmentType item = otherCollection[i]; AddAndUpdateTokenInfo(node, collection, item); } } @@ -777,21 +797,48 @@ protected bool IsNextRuleBooleanParenthesis() int caseDepth = 0; // 0 means there was no select int topmostSelect = 0; - int insideIIf = 0; + // Stack to track paren levels where IIF calls started + // This allows proper handling of multiple boolean operators inside IIF + Stack iifParenLevels = new Stack(); + bool pendingIIf = false; for (bool loop = true; loop == true; consume()) { + // Check if the previous token was IIF and this token is its opening parenthesis + // This must be done before resetting pendingIIf, as we need to consume the flag + bool isIIfOpeningParen = pendingIIf && LA(1) == TSql80ParserInternal.LeftParenthesis; + + // Reset pendingIIf at start of each iteration - it will only be set to true + // if this token is the IIF identifier. This ensures IIF must be immediately + // followed by ( to be recognized as a function call. 
+ pendingIIf = false; + switch (LA(1)) { case TSql80ParserInternal.Identifier: // if identifier is IIF if(NextTokenMatches(CodeGenerationSupporter.IIf)) { - ++insideIIf; + // Mark that we're expecting IIF's opening parenthesis next + pendingIIf = true; + } + // if identifier is REGEXP_LIKE + else if(NextTokenMatches(CodeGenerationSupporter.RegexpLike)) + { + if (caseDepth == 0 && topmostSelect == 0 && iifParenLevels.Count == 0) + { + matches = true; + loop = false; + } } break; case TSql80ParserInternal.LeftParenthesis: ++openParens; + if (isIIfOpeningParen) + { + // Record the paren level where IIF started + iifParenLevels.Push(openParens); + } break; case TSql80ParserInternal.RightParenthesis: if (openParens == topmostSelect) @@ -799,6 +846,12 @@ protected bool IsNextRuleBooleanParenthesis() topmostSelect = 0; } + // Check if we're closing an IIF's parenthesis + if (iifParenLevels.Count > 0 && iifParenLevels.Peek() == openParens) + { + iifParenLevels.Pop(); + } + --openParens; if (openParens == 0) { @@ -828,18 +881,13 @@ protected bool IsNextRuleBooleanParenthesis() case TSql80ParserInternal.Exists: case TSql80ParserInternal.TSEqual: case TSql80ParserInternal.Update: - if (caseDepth == 0 && topmostSelect == 0 && insideIIf == 0) + if (caseDepth == 0 && topmostSelect == 0 && iifParenLevels.Count == 0) { // The number of open paranthesis are not important. 
- // Unless inside an iff + // Unless inside an IIF (tracked by paren level stack) matches = true; loop = false; } - else if (insideIIf > 0) - { - // Found the operator inside IIF - --insideIIf; - } break; case TSql80ParserInternal.Case: ++caseDepth; @@ -1452,8 +1500,8 @@ protected static void VerifyAllowedIndexOption(IndexAffectingStatement statement if (option.OptionKind == IndexOptionKind.DropExisting || option.OptionKind == IndexOptionKind.LobCompaction || option.OptionKind == IndexOptionKind.Order || - option.OptionKind == IndexOptionKind.Resumable || - option.OptionKind == IndexOptionKind.MaxDuration) + ((versionFlags & SqlVersionFlags.TSql160AndAbove) == 0 && option.OptionKind == IndexOptionKind.Resumable) || + ((versionFlags & SqlVersionFlags.TSql120AndAbove) == 0 && option.OptionKind == IndexOptionKind.MaxDuration)) { invalidOption = true; } diff --git a/SqlScriptDom/Parser/TSql/TSqlFabricDW.g b/SqlScriptDom/Parser/TSql/TSqlFabricDW.g new file mode 100644 index 0000000..77019ed --- /dev/null +++ b/SqlScriptDom/Parser/TSql/TSqlFabricDW.g @@ -0,0 +1,34654 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation. All rights reserved. +// +//------------------------------------------------------------------------------ + +// sacaglar: Handling position information for ASTs +// The properties and AddX (X is the type of the parameter, e.g., AddStatement) +// update the position information of the AST. In the rare case when +// we are setting a bool, int, or string etc. we have to call UpdateTokenInfo +// with the token (because we are not passing the property a token, we are just +// passing a bool, int etc. +// Also for token that we don't track of like Comma, Semicolon etc. we have to +// call the same function. Alternatively the properties(StartOffset, FragmentLength) +// on Fragment.cs can be used for this purpose. 
+ +options { + language = "CSharp"; + namespace = "Microsoft.SqlServer.TransactSql.ScriptDom"; +} + +{ + using System.Diagnostics; + using System.Globalization; + using System.Collections.Generic; +} + +class TSqlFabricDWParserInternal extends Parser("TSqlFabricDWParserBaseInternal"); +options { + k = 2; + defaultErrorHandler=false; + classHeaderPrefix = "internal partial"; + importVocab = TSql; +} + +{ + public TSqlFabricDWParserInternal(bool initialQuotedIdentifiersOn) + : base(initialQuotedIdentifiersOn) + { + initialize(); + } +} + +// Figure out a way to refactor exception handling +entryPointChildObjectName returns [ChildObjectName vResult = null] + : + vResult=childObjectNameWithThreePrefixes + EOF + ; + +entryPointSchemaObjectName returns [SchemaObjectName vResult = null] + : + vResult=schemaObjectFourPartName + EOF + ; + +entryPointScalarDataType returns [DataTypeReference vResult = null] + : + vResult = scalarDataType + EOF + ; + +entryPointExpression returns [ScalarExpression vResult = null] + : + vResult = expression + EOF + ; + +entryPointBooleanExpression returns [BooleanExpression vResult = null] + : + vResult = booleanExpression + EOF + ; + +entryPointStatementList returns [StatementList vResult = null] +{ + bool vParseErrorOccurred = false; +} + : + vResult = statementList[ref vParseErrorOccurred] + { + if (vParseErrorOccurred) + vResult = null; + } + EOF + ; + +entryPointSubqueryExpressionWithOptionalCTE returns [SelectStatement vResult = null] +{ + SelectFunctionReturnType vRetType; +} + : + vRetType = functionReturnClauseRelational + { + vResult = vRetType.SelectStatement; + } + EOF + ; + +entryPointIPv4Address returns [IPv4 vResult = null] + : + vResult = ipAddressV4 + EOF + ; + +entryPointConstantOrIdentifier returns [TSqlFragment vResult = null] + : + vResult = possibleNegativeConstantOrIdentifier + EOF + ; + +entryPointConstantOrIdentifierWithDefault returns [TSqlFragment vResult = null] + : + vResult = 
possibleNegativeConstantOrIdentifierWithDefault + EOF + ; + +script returns [TSqlScript vResult = this.FragmentFactory.CreateFragment()] +{ + TSqlBatch vCurrentBatch; + + // Script always includes all of the tokens... + if (vResult.ScriptTokenStream != null && vResult.ScriptTokenStream.Count > 0) + { + vResult.UpdateTokenInfo(0,vResult.ScriptTokenStream.Count-1); + } +} + : vCurrentBatch = batch + { + if (vCurrentBatch != null) + AddAndUpdateTokenInfo(vResult, vResult.Batches, vCurrentBatch); + } + ( + Go + { + ResetQuotedIdentifiersSettingToInitial(); + ThrowPartialAstIfPhaseOne(null); + } + vCurrentBatch = batch + { + if (vCurrentBatch != null) + AddAndUpdateTokenInfo(vResult, vResult.Batches, vCurrentBatch); + } + + )* + + tEof:EOF + { + UpdateTokenInfo(vResult,tEof); + } + ; + +// TODO, sacaglar: Tracking issue, bug# 584772 +batch returns [TSqlBatch vResult = null] +{ + TSqlStatement vStatement; +} + : (tSemi:Semicolon)* + ( + ( + Create (((Or Alter)? ( Proc | Procedure | Trigger | View | Function)) | ( Default | Rule | Schema | {NextTokenMatches(CodeGenerationSupporter.Federation)}? | {NextTokenMatches(CodeGenerationSupporter.Materialized)}? )) + | + Alter ( Proc | Procedure | Trigger | View | Function | {NextTokenMatches(CodeGenerationSupporter.Federation)}? | {NextTokenMatches(CodeGenerationSupporter.Materialized)}? ) + | + Use {NextTokenMatches(CodeGenerationSupporter.Federation) && LA(2) == Identifier}? 
+ )=> + ( + vStatement=lastStatementOptSemi + { + if (vStatement != null) + { + if (vResult == null) + { + vResult = this.FragmentFactory.CreateFragment(); + } + AddAndUpdateTokenInfo(vResult, vResult.Statements, vStatement); + } + } + ) + | + (vStatement = optSimpleExecute + { + if (vStatement != null) // Can be empty + { + ThrowPartialAstIfPhaseOne(vStatement); + + if (vResult == null) + vResult = this.FragmentFactory.CreateFragment(); + + AddAndUpdateTokenInfo(vResult, vResult.Statements, vStatement); + } + } + (vStatement=statementOptSemi + { + if (vStatement != null) // statement can be null if there was a parse error. + { + if (vResult == null) + vResult = this.FragmentFactory.CreateFragment(); + + AddAndUpdateTokenInfo(vResult, vResult.Statements, vStatement); + } + } + )* + ) + ) + ; + exception + catch[TSqlParseErrorException exception] + { + if (!exception.DoNotLog) + { + AddParseError(exception.ParseError); + } + RecoverAtBatchLevel(); + } + catch[antlr.NoViableAltException exception] + { + ParseError error = GetFaultTolerantUnexpectedTokenError( + exception.token, exception, _tokenSource.LastToken.Offset); + AddParseError(error); + RecoverAtBatchLevel(); + } + catch[antlr.MismatchedTokenException exception] + { + ParseError error = GetFaultTolerantUnexpectedTokenError( + exception.token, exception, _tokenSource.LastToken.Offset); + AddParseError(error); + RecoverAtBatchLevel(); + } + catch[antlr.RecognitionException] + { + ParseError error = GetUnexpectedTokenError(); + AddParseError(error); + RecoverAtBatchLevel(); + } + catch[antlr.TokenStreamRecognitionException exception] + { + // This exception should be handled when we are creating TSqlTokenStream... 
+ ParseError error = ProcessTokenStreamRecognitionException(exception, _tokenSource.LastToken.Offset); + AddParseError(error); + RecoverAtBatchLevel(); + } + catch[antlr.ANTLRException exception] + { + CreateInternalError("batch", exception); + } + +statementOptSemi returns [TSqlStatement vResult = null] + : vResult=statement optSemicolons[vResult] + ; + +lastStatementOptSemi returns [TSqlStatement vResult = null] + : vResult=lastStatement optSemicolons[vResult] + ; + +optSemicolons[TSqlStatement vParent] +{ + int nSemicolons = 0; +} + : ( + // Greedy behavior is good enough, we ignore the semicolons + options {greedy = true; } : + tSemi:Semicolon + { + ++nSemicolons; + if (vParent != null) // vResult can be null if there was a parse error. + UpdateTokenInfo(vParent,tSemi); + } + )* + ; + + +///////////////////////////////////////////////// +// Copy Command +///////////////////////////////////////////////// +copyStatement returns [CopyStatement vResult = FragmentFactory.CreateFragment()] +{ + SchemaObjectName vTo; + StringLiteral vFrom; +} + : + tCopy:Identifier + { + Match(tCopy, CodeGenerationSupporter.CopyCommand); + } + tInto: Into + vTo = schemaObjectThreePartName + { + UpdateTokenInfo(vResult,tCopy); + vResult.Into = vTo; + } + (copyColumnList[vResult])? + From vFrom = nonEmptyString /* No need to verify null as it's already nonempty */ + { + ExternalFileOption.CheckXMLValidity(vFrom, CodeGenerationSupporter.From); + AddAndUpdateTokenInfo(vResult, vResult.From, vFrom); + } + (Comma vFrom = nonEmptyString + { + ExternalFileOption.CheckXMLValidity(vFrom, CodeGenerationSupporter.From); + AddAndUpdateTokenInfo(vResult, vResult.From, vFrom); + } + )* + (copyWithClause[vResult])? + (optimizerHints[vResult, vResult.OptimizerHints])? 
+;
+
+// COPY column list: "(" column [DEFAULT <literal>] [<field-number>] ("," ...)* ")".
+// Wraps the per-column options in a single CopyOption of kind ColumnOptions on vParent.
+copyColumnList [CopyStatement vParent]
+{
+    CopyOption copyOption = FragmentFactory.CreateFragment();
+    // FIX: removed stray second semicolon after CreateFragment()
+    ListTypeCopyOption vColumnOptions = FragmentFactory.CreateFragment();
+    CopyColumnOption vColumnOption;
+    Int32 columnCount = 0;
+}
+:
+    tLParen: LeftParenthesis
+    {
+        copyOption.Kind = CopyOptionKind.ColumnOptions;
+    }
+    vColumnOption = copyColumnOption[ref columnCount, tLParen]
+    {
+        AddAndUpdateTokenInfo(vColumnOptions, vColumnOptions.Options, vColumnOption);
+    }
+    (Comma vColumnOption = copyColumnOption[ref columnCount, tLParen]
+        {
+            AddAndUpdateTokenInfo(vColumnOptions, vColumnOptions.Options, vColumnOption);
+        }
+    )*
+    tRParen:RightParenthesis
+    {
+        UpdateTokenInfo(vParent,tRParen);
+        copyOption.Value = vColumnOptions;
+        AddAndUpdateTokenInfo(vParent, vParent.Options, copyOption);
+    }
+;
+
+// Single COPY column option: name, optional DEFAULT value, optional source field number.
+// columnCount is passed by ref so the caller can enforce the column limit (tToken anchors errors).
+copyColumnOption [ref Int32 columnCount, IToken tToken] returns [CopyColumnOption vResult = FragmentFactory.CreateFragment()]
+{
+    Identifier vColumnName;
+    ScalarExpression vDefaultColumnValue = null;
+    bool vDefaultSpecified = false;
+    IntegerLiteral vFieldNumber = null;
+}
+    :
+    vColumnName = identifier
+    {
+        CheckAndIncrementColumnCount(ref columnCount, tToken);
+    }
+    (tDefault:Default vDefaultColumnValue = defaultValueLiteral
+    {
+        vDefaultSpecified = true;
+    })?
+    (vFieldNumber = integer)?
+    {
+        CopyListOptionsHelper.Instance.AssignCopyColumnOptions(vResult, vColumnName, vDefaultColumnValue, vDefaultSpecified, vFieldNumber, columnCount);
+    }
+;
+
+// WITH clause of the COPY statement: WITH "(" option ("," option)* ")".
+// "encountered" is threaded through copyOption to detect duplicate options.
+copyWithClause [CopyStatement vParent]
+{
+    CopyOption vOption;
+    Int32 encountered = 0;
+}
+    : With LeftParenthesis vOption = copyOption[ref encountered]
+    {
+        AddAndUpdateTokenInfo(vParent, vParent.Options, vOption);
+    }
+    (Comma vOption = copyOption[ref encountered]
+        {
+            AddAndUpdateTokenInfo(vParent, vParent.Options, vOption);
+        }
+    )*
+    tRParen:RightParenthesis
+    {
+        UpdateTokenInfo(vParent,tRParen);
+    }
+;
+
+// One COPY WITH option: <name> "=" <value>. The option name is either a plain
+// identifier (resolved via the options helper) or one of the keyword tokens
+// IDENTITY_INSERT / CREDENTIAL; the value is a single value or a credential tuple.
+copyOption [ref Int32 encountered] returns [CopyOption vResult = FragmentFactory.CreateFragment()]
+{
+    CopyStatementOptionBase vValue = null;
+}
+    :
+    (
+        tOption:Identifier
+        {
+            vResult.Kind = CopyIdentifierOrValueOptionsHelper.Instance.ParseOption(tOption);
+            CheckCopyOptionDuplication(ref encountered, vResult.Kind, tOption);
+        }
+        | tIdentityInsert:IdentityInsert
+        {
+            vResult.Kind = CopyOptionKind.Identity_Insert;
+            CheckCopyOptionDuplication(ref encountered, vResult.Kind, tIdentityInsert);
+        }
+        | tCredential:Credential
+        {
+            vResult.Kind = CopyOptionKind.Credential;
+            CheckCopyOptionDuplication(ref encountered, vResult.Kind, tCredential);
+        }
+    )
+    EqualsSign
+    (
+        vValue = singleValueTypeCopyOption
+        {
+            CopyIdentifierOrValueOptionsHelper.Instance.AssignValueToCopyOption(vResult, (SingleValueTypeCopyOption) vValue);
+        }
+        |
+        vValue = copyCredentialOption
+        {
+            // BUG FIX: the original condition used "!= ... || != ...", which is a
+            // tautology (a Kind can never equal both values), so a credential value
+            // was accepted for ANY option kind. A credential tuple is only valid for
+            // the CREDENTIAL / ERRORFILE_CREDENTIAL options, and must itself validate.
+            if ((vResult.Kind == CopyOptionKind.Credential || vResult.Kind == CopyOptionKind.ErrorFileCredential) &&
+                CopyIdentifierOrValueOptionsHelper.Instance.ValidateCopyCredential((CopyCredentialOption)vValue))
+            {
+                vResult.Value = (CopyCredentialOption) vValue;
+            }
+            else
+            {
+                ThrowIncorrectSyntaxErrorException(vValue);
+            }
+        }
+    )
+;
+
+// Scalar COPY option value: identifier, integer or string literal.
+singleValueTypeCopyOption returns [SingleValueTypeCopyOption vResult = FragmentFactory.CreateFragment()]
+{
+    Identifier vValue;
+    Literal vLiteral;
+}
+    :
+    vValue=identifier
+    {
+        
vResult.SingleValue = IdentifierOrValueExpression(vValue);
+    }
+    | vLiteral = integer
+    {
+        vResult.SingleValue = IdentifierOrValueExpression(vLiteral);
+    }
+    | vLiteral = stringLiteral
+    {
+        vResult.SingleValue = IdentifierOrValueExpression(vLiteral);
+    }
+;
+
+// Credential value for COPY options: "(" IDENTITY = '<string>' [, SECRET = '<string>'] ")".
+copyCredentialOption returns [CopyCredentialOption vResult = FragmentFactory.CreateFragment()]
+{
+    StringLiteral vIdentityValue;
+    StringLiteral vSecretValue;
+}
+    :
+    LeftParenthesis
+    Identity EqualsSign vIdentityValue = stringLiteral
+    {
+        vResult.Identity = vIdentityValue;
+    }
+    // SECRET is optional; "Secret" is not a reserved token, so it arrives as a plain
+    // Identifier and is verified against the expected keyword via Match.
+    (Comma tSecret:Identifier EqualsSign vSecretValue = stringLiteral
+    {
+        Match(tSecret, CodeGenerationSupporter.Secret);
+        CopyIdentifierOrValueOptionsHelper.Instance.ValidateSecret(vSecretValue);
+        vResult.Secret = (StringLiteral) vSecretValue;
+    })?
+    RightParenthesis
+;
+
+
+/////////////////////////////////////////////////
+// Rename table statement
+/////////////////////////////////////////////////
+
+/* Syntax:
+RENAME OBJECT [::] [Database].[Schema].OldTable TO NewTable
+*/
+
+// "RENAME" is not a reserved token, so it is matched as an Identifier and verified.
+// Only the OBJECT entity kind is supported; any other kind is rejected as a syntax error.
+renameEntityStatement returns [RenameEntityStatement vResult = this.FragmentFactory.CreateFragment()]
+{
+    SchemaObjectName vSchemaObjectName;
+    Identifier vRenamedEntityType;
+    Identifier vNewName;
+}
+    :tRename:Identifier vRenamedEntityType = securityStatementPermission
+    {
+        Match(tRename, CodeGenerationSupporter.Rename);
+        vResult.RenameEntityType = ParseSecurityObjectKind(vRenamedEntityType);
+        if(!(vResult.RenameEntityType == SecurityObjectKind.Object))
+        {
+            // RenameEntityStatement::Translate() throws an unexpected exception if the entity type is not
+            // "object". Since other object kinds are not supported in this statement, we throw
+            // a syntax error.
+            throw GetUnexpectedTokenErrorException(vRenamedEntityType);
+        }
+    }
+
+    // Optional "::" separator before the old (three-part) name.
+    (
+        DoubleColon
+        {
+            vResult.SeparatorType = SeparatorType.DoubleColon;
+        }
+    )?
+ + vSchemaObjectName = schemaObjectThreePartName + { + vResult.OldName = vSchemaObjectName; + } + To + ( + vNewName = identifier + { + vResult.NewName = vNewName; + } + ) + + { + UpdateTokenInfo(vResult, tRename); + } + ; + +///////////////////////////////////////////////// +// CREATE TABLE AS SELECT STATEMENT +///////////////////////////////////////////////// + +ctasCreateTableStatement [CreateTableStatement vParent] +{ + SelectStatement vSelectStatement; +} + : + (columnNameList[vParent, vParent.CtasColumns])? + ( + options {greedy = true; } : + withTableOptions[vParent] + ) + As + vSelectStatement = selectStatement[SubDmlFlags.None] + { + vParent.SelectStatement = vSelectStatement; + } + { + CheckCtasStatementHasDistributionOption(vParent); + } + ; + +///////////////////////////////////////////////// +// CREATE EXTERNAL TABLE AS SELECT STATEMENT +///////////////////////////////////////////////// +ctasCreateExternalTableStatement [CreateExternalTableStatement vParent] +{ + SelectStatement vSelectStatement; +} + : + ( + options { greedy = true; } : + withExternalTableOptions[vParent] + ) + As + vSelectStatement = selectStatement[SubDmlFlags.None] + { + vParent.SelectStatement = vSelectStatement; + } + { + CheckExternalTableCtasStatementHasNotRejectedRowLocationOption(vParent); + } + ; + +// This rule conflicts with identifierStatements (both can start with Identifier) +// We should update predicates here and in identifierStatements at the same time +optSimpleExecute returns [ExecuteStatement vResult = null] +{ + ExecutableProcedureReference vExecProc; + ExecuteSpecification vExecuteSpecification; +} + : {!NextTokenMatches(CodeGenerationSupporter.Disable) && !NextTokenMatches(CodeGenerationSupporter.Enable) && + !NextTokenMatches(CodeGenerationSupporter.Move) && !NextTokenMatches(CodeGenerationSupporter.Get) && + !NextTokenMatches(CodeGenerationSupporter.Receive) && !NextTokenMatches(CodeGenerationSupporter.Send) && + 
!NextTokenMatches(CodeGenerationSupporter.Throw) && !NextTokenMatches(CodeGenerationSupporter.Rename)}? + (vExecProc = execProc + { + vResult = FragmentFactory.CreateFragment(); + vExecuteSpecification = FragmentFactory.CreateFragment(); + vExecuteSpecification.ExecutableEntity = vExecProc; + vResult.ExecuteSpecification=vExecuteSpecification; + } + optSemicolons[vResult] + ) + | /* empty */ + ; + +statement returns [TSqlStatement vResult = null] +{ + // The next tokens offset is cached to help error + // recovery, so when error occurs if the next token is + // Create or Alter, and its offset is the same as + // vNextTokenOffset that means, this rule already + // tried to parsed and failed, so we should skip over. + // The case where it works is: + // select * from create table t1(c1 int) + int nextTokenLine = LT(1).getLine(); + int nextTokenColumn = LT(1).getColumn(); +} + : vResult=createTableStatement + | vResult=alterTableStatement + | vResult=createIndexStatement + | vResult=copyStatement + | vResult=declareStatements + | vResult=setStatements + | vResult=beginStatements + | vResult=breakStatement + | vResult=continueStatement + | vResult=ifStatement + | vResult=whileStatement + | vResult=labelStatement + | vResult=backupStatements + | vResult=restoreStatements + | vResult=gotoStatement + | vResult=saveTransactionStatement + | vResult=rollbackTransactionStatement + | vResult=commitTransactionStatement + | vResult=createStatisticsStatement + | vResult=updateStatisticsStatement + | vResult=alterDatabaseStatements + | vResult=executeStatement + | vResult=withCommonTableExpressionsAndXmlNamespacesStatements + | vResult=raiseErrorStatement + | vResult=alter2005Statements + | vResult=create2005Statements + | vResult=createDatabaseStatements + | vResult=addStatements + | vResult=identifierStatements + | vResult=printStatement + | vResult=waitForStatement + | vResult=readTextStatement + | vResult=updateTextStatement + | vResult=writeTextStatement + | 
vResult=lineNoStatement + | vResult=useStatement + | vResult=killStatements + | vResult=bulkInsertStatement + | vResult=insertBulkStatement + | vResult=checkpointStatement + | vResult=reconfigureStatement + | vResult=shutdownStatement + | vResult=setUserStatement + | vResult=truncateTableStatement + | vResult=grantStatement90 + | vResult=denyStatement90 + | vResult=revokeStatement90 + | vResult=returnStatement + | vResult=openStatements + | vResult=closeStatements + | vResult=deallocateCursorStatement + | vResult=fetchCursorStatement + | vResult=dropStatements + | vResult=dbccStatement + | vResult=revertStatement + | vResult=executeAsStatement + | vResult=endConversationStatement + ; + exception + catch[TSqlParseErrorException exception] + { + if (!exception.DoNotLog) + { + AddParseError(exception.ParseError); + } + RecoverAtStatementLevel(nextTokenLine, nextTokenColumn); + } + catch[antlr.NoViableAltException exception] + { + ParseError error = GetFaultTolerantUnexpectedTokenError( + exception.token, exception, _tokenSource.LastToken.Offset); + AddParseError(error); + RecoverAtStatementLevel(nextTokenLine, nextTokenColumn); + } + catch[antlr.MismatchedTokenException exception] + { + ParseError error = GetFaultTolerantUnexpectedTokenError( + exception.token, exception, _tokenSource.LastToken.Offset); + AddParseError(error); + RecoverAtStatementLevel(nextTokenLine, nextTokenColumn); + } + catch[antlr.RecognitionException] + { + ParseError error = GetUnexpectedTokenError(); + AddParseError(error); + RecoverAtStatementLevel(nextTokenLine, nextTokenColumn); + } + catch[antlr.TokenStreamRecognitionException exception] + { + // This exception should be handled when we are creating TSqlTokenStream... 
+ ParseError error = ProcessTokenStreamRecognitionException(exception, _tokenSource.LastToken.Offset); + AddParseError(error); + RecoverAtStatementLevel(nextTokenLine, nextTokenColumn); + } + catch[antlr.ANTLRException exception] + { + CreateInternalError("statement", exception); + } + +withCommonTableExpressionsAndXmlNamespacesStatements returns [StatementWithCtesAndXmlNamespaces vResult = null] +{ + WithCtesAndXmlNamespaces vWithCommonTableExpressionsAndXmlNamespaces = null; +} + : + ( + vWithCommonTableExpressionsAndXmlNamespaces=withCommonTableExpressionsAndXmlNamespaces + )? + ( + vResult=select[SubDmlFlags.SelectNotForInsert] + { + // check for invalid combination of CHANGE_TRACKING_CONTEXT and Select statement + if ((vWithCommonTableExpressionsAndXmlNamespaces != null) && (vWithCommonTableExpressionsAndXmlNamespaces.ChangeTrackingContext != null)) + ThrowParseErrorException("SQL46072", vWithCommonTableExpressionsAndXmlNamespaces.ChangeTrackingContext, TSqlParserResource.SQL46072Message); + } + | + vResult=deleteStatement[SubDmlFlags.None] + | + vResult=insertStatement[SubDmlFlags.None] + | + vResult=updateStatement[SubDmlFlags.None] + | + vResult=mergeStatement[SubDmlFlags.None] + ) + { + vResult.WithCtesAndXmlNamespaces = vWithCommonTableExpressionsAndXmlNamespaces; + } + ; + +lastStatement returns [TSqlStatement vResult = null] + : vResult=createProcedureStatement + | vResult=alterProcedureStatement + | vResult=createTriggerStatement + | vResult=alterTriggerStatement + | vResult=createDefaultStatement + | vResult=createRuleStatement + | vResult=createViewStatement + | vResult=alterViewStatement + | vResult=createFunctionStatement + | vResult=alterFunctionStatement + | vResult=createSchemaStatement + | vResult=createIdentifierStatement + | vResult=alterIdentifierStatement + | vResult=useFederationStatement + | vResult=createOrAlterStatements + ; + +createIdentifierStatement returns [TSqlStatement vResult] + : tCreate:Create + ( + 
{NextTokenMatches(CodeGenerationSupporter.Materialized)}? + vResult=createMaterializedViewStatement + | + {NextTokenMatches(CodeGenerationSupporter.Federation)}? + vResult=createFederationStatement + ) + { + UpdateTokenInfo(vResult,tCreate); + } + ; + +alterIdentifierStatement returns [TSqlStatement vResult] + : tAlter:Alter + ( + {NextTokenMatches(CodeGenerationSupporter.Materialized)}? + vResult=alterMaterializedViewStatement + | + {NextTokenMatches(CodeGenerationSupporter.Federation)}? + vResult=alterFederationStatement + ) + { + UpdateTokenInfo(vResult,tAlter); + } + ; + +createOrAlterStatements returns [TSqlStatement vResult] + : tCreate:Create Or Alter + ( + vResult = createOrAlterFunctionStatement + | vResult = createOrAlterProcedureStatement + | vResult = createOrAlterTriggerStatement + | vResult = createOrAlterViewStatement + ) + { + UpdateTokenInfo(vResult,tCreate); + } + ; + +// This rule conflicts with optSimpleExecute (both can start with Identifier) +// We should update predicates here and in optSimpleExecute at the same time +identifierStatements returns [TSqlStatement vResult] + : {NextTokenMatches(CodeGenerationSupporter.Disable)}? + vResult=disableTriggerStatement + | {NextTokenMatches(CodeGenerationSupporter.Enable)}? + vResult=enableTriggerStatement + | {NextTokenMatches(CodeGenerationSupporter.Move)}? + vResult = moveConversationStatement + | {NextTokenMatches(CodeGenerationSupporter.Get)}? + vResult = getConversationGroupStatement + | {NextTokenMatches(CodeGenerationSupporter.Receive)}? + vResult = receiveStatement + | {NextTokenMatches(CodeGenerationSupporter.Send)}? + vResult = sendStatement + | {NextTokenMatches(CodeGenerationSupporter.Throw)}? + vResult = throwStatement + | {NextTokenMatches(CodeGenerationSupporter.Rename)}? 
+ vResult = renameEntityStatement + ; + + +disableTriggerStatement returns [EnableDisableTriggerStatement vResult = this.FragmentFactory.CreateFragment()] + : + tDisable:Identifier + { + Match(tDisable, CodeGenerationSupporter.Disable); + UpdateTokenInfo(vResult,tDisable); + vResult.TriggerEnforcement = TriggerEnforcement.Disable; + } + enableDisableTriggerBody[vResult] + ; + +enableTriggerStatement returns [EnableDisableTriggerStatement vResult = this.FragmentFactory.CreateFragment()] + : + tEnable:Identifier + { + Match(tEnable, CodeGenerationSupporter.Enable); + UpdateTokenInfo(vResult,tEnable); + vResult.TriggerEnforcement = TriggerEnforcement.Enable; + } + enableDisableTriggerBody[vResult] + ; + +enableDisableTriggerBody[EnableDisableTriggerStatement vParent] +{ + SchemaObjectName vSchemaObjectName; + TriggerObject vTriggerObject; +} + : + Trigger + ( + vSchemaObjectName=schemaObjectThreePartName + { + AddAndUpdateTokenInfo(vParent, vParent.TriggerNames,vSchemaObjectName); + } + ( + Comma vSchemaObjectName=schemaObjectThreePartName + { + AddAndUpdateTokenInfo(vParent, vParent.TriggerNames,vSchemaObjectName); + } + )* + | + All + { + vParent.All = true; + } + ) + On vTriggerObject=triggerObject + { + vParent.TriggerObject = vTriggerObject; + } + ; + +create2005Statements returns [TSqlStatement vResult = null] + : tCreate:Create + ( + {NextTokenMatches(CodeGenerationSupporter.Aggregate)}? + vResult=createAggregateStatement + | + {NextTokenMatches(CodeGenerationSupporter.Application)}? + vResult=createApplicationRoleStatement + | + {NextTokenMatches(CodeGenerationSupporter.Assembly)}? + vResult=createAssemblyStatement + | + {NextTokenMatches(CodeGenerationSupporter.Asymmetric)}? + vResult=createAsymmetricKeyStatement + | + {NextTokenMatches(CodeGenerationSupporter.Availability)}? + vResult=createAvailabilityGroupStatement + | + {NextTokenMatches(CodeGenerationSupporter.Broker)}? 
+ vResult=createBrokerPriorityStatement + | + {NextTokenMatches(CodeGenerationSupporter.Certificate)}? + vResult=createCertificateStatement + | + {NextTokenMatches(CodeGenerationSupporter.Column)}? + vResult=createColumnStatements + | + {NextTokenMatches(CodeGenerationSupporter.ColumnStore)}? + vResult=createColumnStoreIndexStatement[null, null] + | + {NextTokenMatches(CodeGenerationSupporter.Contract)}? + vResult=createContractStatement + | + {NextTokenMatches(CodeGenerationSupporter.Credential)}? + vResult=createCredentialStatement + | + {NextTokenMatches(CodeGenerationSupporter.Cryptographic)}? + vResult=createCryptographicProviderStatement + | + {NextTokenMatches(CodeGenerationSupporter.Endpoint)}? + vResult=createEndpointStatement + | + {NextTokenMatches(CodeGenerationSupporter.Event)}? + vResult=createEventStatement // NOTIFICATION or SESSION + | + {NextTokenMatches(CodeGenerationSupporter.External)}? + vResult=createExternalStatements // EXTERNAL DATA SOURCE, FILE FORMAT, STREAM, TABLE, RESOURCE POOL, LIBRARY, LANGUAGE + | + {NextTokenMatches(CodeGenerationSupporter.Fulltext)}? + vResult=createFulltextStatement // Index or CATALOG + | + vResult=createPrimaryXmlIndexStatement + | + {NextTokenMatches(CodeGenerationSupporter.Selective)}? + vResult=createSelectiveXmlIndexStatement + | + {NextTokenMatches(CodeGenerationSupporter.Xml)}? + vResult=createXmlStatements // Index or Schema + | + {NextTokenMatches(CodeGenerationSupporter.Login)}? + vResult=createLoginStatement + | + {NextTokenMatches(CodeGenerationSupporter.Message)}? + vResult=createMessageTypeStatement + | + {NextTokenMatches(CodeGenerationSupporter.Master)}? + vResult=createMasterKeyStatement + | + {NextTokenMatches(CodeGenerationSupporter.Partition)}? + vResult=createPartitionStatement // SCHEME or Function + | + {NextTokenMatches(CodeGenerationSupporter.Queue)}? + vResult=createQueueStatement + | + {NextTokenMatches(CodeGenerationSupporter.Remote)}? 
+ vResult=createRemoteServiceBindingStatement + | + {NextTokenMatches(CodeGenerationSupporter.Resource)}? + vResult=createResourcePoolStatement + | + {NextTokenMatches(CodeGenerationSupporter.Role)}? + vResult=createRoleStatement + | + {NextTokenMatches(CodeGenerationSupporter.Route)}? + vResult=createRouteStatement + | + {NextTokenMatches(CodeGenerationSupporter.Search)}? + vResult=createSearchPropertyListStatement + | + {NextTokenMatches(CodeGenerationSupporter.Service)}? + vResult=createServiceStatement + | + {NextTokenMatches(CodeGenerationSupporter.Spatial)}? + vResult=createSpatialIndexStatement + | + {NextTokenMatches(CodeGenerationSupporter.Symmetric)}? + vResult=createSymmetricKeyStatement + | + {NextTokenMatches(CodeGenerationSupporter.Synonym)}? + vResult=createSynonymStatement + | + {NextTokenMatches(CodeGenerationSupporter.Type)}? + vResult=createTypeStatement + | + {NextTokenMatches(CodeGenerationSupporter.Server)}? + vResult=createServerStatements //AUDIT or ROLE + | + {NextTokenMatches(CodeGenerationSupporter.Workload)}? + vResult=createWorkloadStatements + | + {NextTokenMatches(CodeGenerationSupporter.Sequence)}? + vResult=createSequenceStatement + | + {NextTokenMatches(CodeGenerationSupporter.Security)}? 
+ vResult=createSecurityPolicyStatement + | + vResult=createUserStatement + ) + { + UpdateTokenInfo(vResult,tCreate); + ThrowPartialAstIfPhaseOne(vResult); + } + ; + exception + catch[PhaseOnePartialAstException exception] + { + UpdateTokenInfo(exception.Statement, tCreate); + throw; + } + +createAggregateStatement returns [CreateAggregateStatement vResult = FragmentFactory.CreateFragment()] +{ + SchemaObjectName vSchemaObjectName; + ProcedureParameter vParameter; + AssemblyName vAssemblyName; + DataTypeReference vDataType; +} + : tAggregate:Identifier vSchemaObjectName=schemaObjectThreePartName + { + Match(tAggregate, CodeGenerationSupporter.Aggregate); + CheckTwoPartNameForSchemaObjectName(vSchemaObjectName, CodeGenerationSupporter.Aggregate); + vResult.Name = vSchemaObjectName; + ThrowPartialAstIfPhaseOne(vResult); + } + LeftParenthesis vParameter = aggregateParameter + { + AddAndUpdateTokenInfo(vResult, vResult.Parameters, vParameter); + } + (Comma vParameter = aggregateParameter + { + AddAndUpdateTokenInfo(vResult, vResult.Parameters, vParameter); + } + )* + RightParenthesis + tReturns:Identifier vDataType = scalarDataType + { + Match(tReturns,CodeGenerationSupporter.Returns); + vResult.ReturnType = vDataType; + } + External vAssemblyName = assemblyName + { + vResult.AssemblyName = vAssemblyName; + } + ; + +aggregateParameter returns [ProcedureParameter vResult = FragmentFactory.CreateFragment()] +{ + Identifier vParamName; + DataTypeReference vDataType; + NullableConstraintDefinition vNullableConstraintDefinition; +} + : vParamName = identifierVariable (As)? vDataType = scalarDataType + { + vResult.VariableName = vParamName; + vResult.DataType = vDataType; + } + ( + vNullableConstraintDefinition = nullableConstraint + { + vResult.Nullable=vNullableConstraintDefinition; + } + )? 
+ ; + +createApplicationRoleStatement returns [CreateApplicationRoleStatement vResult = this.FragmentFactory.CreateFragment()] + : + applicationRoleStatement[vResult, true] + ; + +createAssemblyStatement returns [CreateAssemblyStatement vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; + AssemblyOption vOption; +} + : tAssembly:Identifier vIdentifier=identifier + { + Match(tAssembly, CodeGenerationSupporter.Assembly); + vResult.Name = vIdentifier; + ThrowPartialAstIfPhaseOne(vResult); + } + authorizationOpt[vResult] + From expressionList[vResult, vResult.Parameters] + ( + // Greedy due to conflict with withCommonTableExpressionsAndXmlNamespaces + options {greedy = true; } : + With tPermissionSet:Identifier EqualsSign + vOption=assemblyPermissionSetOption[tPermissionSet] + { + AddAndUpdateTokenInfo(vResult, vResult.Options, vOption); + } + )? + ; + +createExternalLibraryStatement returns [CreateExternalLibraryStatement vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; + ExternalLibraryFileOption vFileOption; + StringLiteral vLanguage; +} + : tLibrary:Identifier vIdentifier=identifier + { + Match(tLibrary, CodeGenerationSupporter.Library); + vResult.Name = vIdentifier; + ThrowPartialAstIfPhaseOne(vResult); + } + authorizationOpt[vResult] + From vFileOption=createExternalLibraryFileOption + { + vResult.ExternalLibraryFiles.Add(vFileOption); + } + (Comma vFileOption = createExternalLibraryFileOption + { + vResult.ExternalLibraryFiles.Add(vFileOption); + } + )* + With LeftParenthesis tLanguage:Identifier EqualsSign vLanguage=nonEmptyString + { + Match(tLanguage, CodeGenerationSupporter.Language); + vResult.Language = vLanguage; + } + RightParenthesis + ; + +alterExternalLibraryStatement returns [AlterExternalLibraryStatement vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; + ExternalLibraryFileOption vFileOption; + StringLiteral vLanguage; +} + : tLibrary:Identifier 
vIdentifier=identifier + { + Match(tLibrary, CodeGenerationSupporter.Library); + vResult.Name = vIdentifier; + ThrowPartialAstIfPhaseOne(vResult); + } + authorizationOpt[vResult] + Set vFileOption=alterExternalLibraryFileOption + { + vResult.ExternalLibraryFiles.Add(vFileOption); + } + With LeftParenthesis tLanguage:Identifier EqualsSign vLanguage=nonEmptyString + { + Match(tLanguage, CodeGenerationSupporter.Language); + vResult.Language = vLanguage; + } + RightParenthesis + ; + +dropExternalLibraryStatement returns [DropExternalLibraryStatement vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; +} + : tLibrary:Identifier vIdentifier=identifier + { + Match(tLibrary, CodeGenerationSupporter.Library); + vResult.Name = vIdentifier; + ThrowPartialAstIfPhaseOne(vResult); + } + authorizationOpt[vResult] + ; + +binaryOrString returns [ValueExpression vResult] + : + vResult=binary + | + vResult=stringLiteral + ; + +createExternalLibraryFileOption returns [ExternalLibraryFileOption vResult = FragmentFactory.CreateFragment()] +{ + ScalarExpression vContent; + Identifier vPlatform; +} + : LeftParenthesis tContent:Identifier EqualsSign vContent=binaryOrString + { + Match(tContent, CodeGenerationSupporter.Content); + vResult.Content = vContent; + } + (Comma tPlatform:Identifier EqualsSign vPlatform=identifier + { + Match(tPlatform, CodeGenerationSupporter.Platform); + vResult.Platform = vPlatform; + } + )* + RightParenthesis + ; + +alterExternalLibraryFileOption returns [ExternalLibraryFileOption vResult = FragmentFactory.CreateFragment()] +{ + ScalarExpression vContent; + Identifier vPlatform; +} + : LeftParenthesis tContent:Identifier EqualsSign vContent=binaryOrString + { + Match(tContent, CodeGenerationSupporter.Content); + vResult.Content = vContent; + } + (Comma tPlatform:Identifier EqualsSign vPlatform=identifier + { + Match(tPlatform, CodeGenerationSupporter.Platform); + vResult.Platform = vPlatform; + } + )* + RightParenthesis + ; + 
+createExternalLanguageStatement returns [CreateExternalLanguageStatement vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; + ExternalLanguageFileOption vFileOption; +} + : tLanguage:Identifier vIdentifier=identifier + { + Match(tLanguage, CodeGenerationSupporter.Language); + vResult.Name = vIdentifier; + ThrowPartialAstIfPhaseOne(vResult); + } + authorizationOpt[vResult] + From vFileOption=externalLanguageFileOption + { + vResult.ExternalLanguageFiles.Add(vFileOption); + } + (Comma vFileOption = externalLanguageFileOption + { + vResult.ExternalLanguageFiles.Add(vFileOption); + } + )* + ; + +externalLanguageFileOption returns [ExternalLanguageFileOption vResult = FragmentFactory.CreateFragment()] +{ + ScalarExpression vContent; + StringLiteral vFileName; + Identifier vPlatform; + StringLiteral vParameters; + StringLiteral vEnvironmentVariables; +} + : LeftParenthesis tContent:Identifier EqualsSign vContent=binaryOrString + { + Match(tContent, CodeGenerationSupporter.Content); + vResult.Content = vContent; + } + Comma tFileName:Identifier EqualsSign vFileName=nonEmptyString + { + Match(tFileName, CodeGenerationSupporter.File_Name); + vResult.FileName = vFileName; + } + ( + tComma:Comma + ( + {NextTokenMatches(CodeGenerationSupporter.Platform)}? + ( + tPlatform:Identifier EqualsSign vPlatform=identifier + { + Match(tPlatform, CodeGenerationSupporter.Platform); + vResult.Platform = vPlatform; + } + ) + | + {NextTokenMatches(CodeGenerationSupporter.Parameters)}? + ( + tParameters:Identifier EqualsSign vParameters=nonEmptyString + { + Match(tParameters, CodeGenerationSupporter.Parameters); + vResult.Parameters = vParameters; + } + ) + | + {NextTokenMatches(CodeGenerationSupporter.EnvironmentVariables)}? 
+ ( + tEnvironmentVariables:Identifier EqualsSign vEnvironmentVariables=nonEmptyString + { + Match(tEnvironmentVariables, CodeGenerationSupporter.EnvironmentVariables); + vResult.EnvironmentVariables = vEnvironmentVariables; + } + ) + ) + )* + + RightParenthesis + ; + +alterExternalLanguageStatement returns [AlterExternalLanguageStatement vResult = this.FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; + ExternalLanguageFileOption vFileOption; + Identifier vPlatform; +} + : tLanguage:Identifier vIdentifier=identifier + { + Match(tLanguage, CodeGenerationSupporter.Language); + vResult.Name = vIdentifier; + vResult.Operation=new Identifier(); + ThrowPartialAstIfPhaseOne(vResult); + } + ( + authorizationOpt[vResult] + ( + {NextTokenMatches(CodeGenerationSupporter.Set)}? + ( + Set vFileOption=externalLanguageFileOption + { + vResult.ExternalLanguageFiles.Add(vFileOption); + vResult.Operation.Value = CodeGenerationSupporter.Set; + } + ) + | + {NextTokenMatches(CodeGenerationSupporter.Add)}? + ( + Add vFileOption=externalLanguageFileOption + { + vResult.ExternalLanguageFiles.Add(vFileOption); + vResult.Operation.Value = CodeGenerationSupporter.Add; + } + ) + | + {NextTokenMatches(CodeGenerationSupporter.Remove)}? 
+        (
+            tRemove:Identifier tPlatform:Identifier vPlatform=identifier
+            {
+                Match(tRemove, CodeGenerationSupporter.Remove);
+                // BUG FIX: tPlatform was captured but never verified, so any identifier
+                // was accepted where the PLATFORM keyword is required
+                // (ALTER EXTERNAL LANGUAGE ... REMOVE PLATFORM <platform>). Sibling rules
+                // (e.g. externalLanguageFileOption) already Match the Platform keyword.
+                Match(tPlatform, CodeGenerationSupporter.Platform);
+                vResult.Platform = vPlatform;
+                vResult.Operation.Value = CodeGenerationSupporter.Remove;
+            }
+        )
+        )
+    )
+
+    ;
+
+// DROP EXTERNAL LANGUAGE <name> [AUTHORIZATION ...].
+dropExternalLanguageStatement returns [DropExternalLanguageStatement vResult = this.FragmentFactory.CreateFragment()]
+{
+    Identifier vIdentifier;
+}
+    : tLanguage:Identifier vIdentifier=identifier
+    {
+        Match(tLanguage, CodeGenerationSupporter.Language);
+        vResult.Name = vIdentifier;
+        ThrowPartialAstIfPhaseOne(vResult);
+    }
+    authorizationOpt[vResult]
+    ;
+
+// CREATE ASYMMETRIC KEY <name> [AUTHORIZATION ...] (FROM <source> | WITH <spec>) [ENCRYPTION BY PASSWORD = '...'].
+createAsymmetricKeyStatement returns [CreateAsymmetricKeyStatement vResult = FragmentFactory.CreateFragment()]
+{
+    Identifier vIdentifier;
+    Literal vPassword;
+}
+    : tAsymmetric:Identifier Key vIdentifier=identifier
+    {
+        Match(tAsymmetric, CodeGenerationSupporter.Asymmetric);
+        vResult.Name = vIdentifier;
+        ThrowPartialAstIfPhaseOne(vResult);
+    }
+    authorizationOpt[vResult]
+    createAsymmetricKeyParams[vResult]
+    (
+        // Greedy due to linear approximation introduced after the rule securityStatementPermission
+        options {greedy = true; } :
+        vPassword = encryptClause
+        {
+            vResult.Password = vPassword;
+        }
+    )?
+    ;
+
+// Key material source: FROM <encryption source> or WITH <algorithm spec>.
+createAsymmetricKeyParams[CreateAsymmetricKeyStatement vParent]
+{
+    EncryptionSource vSource;
+}
+    : From vSource = asymKeySource
+    {
+        vParent.KeySource=vSource;
+    }
+    | With asymKeySpec[vParent]
+    ;
+
+// FROM-clause source of an asymmetric key: file, assembly or provider.
+asymKeySource returns [EncryptionSource vResult]
+    :
+    vResult = fileEncryptionSource
+    | {NextTokenMatches(CodeGenerationSupporter.Assembly)}?
+ vResult = assemblyEncryptionSource + | vResult = providerEncryptionSource + ; + +assemblyEncryptionSource returns [AssemblyEncryptionSource vResult=FragmentFactory.CreateFragment()] +{ + Identifier vAssembly; +} + : tAssembly:Identifier vAssembly = identifier + { + Match(tAssembly, CodeGenerationSupporter.Assembly); + vResult.Assembly = vAssembly; + } + ; + +providerEncryptionSource returns [ProviderEncryptionSource vResult = FragmentFactory.CreateFragment()] +{ + Identifier vProviderName; +} + : tProvider:Identifier vProviderName = identifier + { + Match(tProvider, CodeGenerationSupporter.Provider); + vResult.Name = vProviderName; + } + providerKeySourceOptions[vResult.KeyOptions, vResult] + ; + +fileEncryptionSource returns [FileEncryptionSource vResult = FragmentFactory.CreateFragment()] +{ + Literal vFile; +} + : (tExecutable:Identifier + { + Match(tExecutable, CodeGenerationSupporter.Executable); + vResult.IsExecutable = true; + } + )? + File EqualsSign vFile = stringLiteral + { + vResult.File = vFile; + } + ; + +asymKeySpec [CreateAsymmetricKeyStatement vParent] + : tAlgorithm:Identifier EqualsSign tRealAlg:Identifier + { + Match(tAlgorithm,CodeGenerationSupporter.Algorithm); + vParent.EncryptionAlgorithm = EncryptionAlgorithmsHelper.Instance.ParseOption(tRealAlg); + UpdateTokenInfo(vParent,tRealAlg); + } + ; + +createCertificateStatement returns [CreateCertificateStatement vResult = FragmentFactory.CreateFragment()] +{ + Identifier vIdentifier; +} + : tCertificate:Identifier vIdentifier=identifier + { + Match(tCertificate, CodeGenerationSupporter.Certificate); + vResult.Name = vIdentifier; + ThrowPartialAstIfPhaseOne(vResult); + } + authorizationOpt[vResult] + createCertificateParams[vResult] + ( + // Greedy due to linear approximation introduced after the rule securityStatementPermission + options {greedy = true; } : + createCertificateActivityFlag[vResult] + )? 
+ ; + +createCertificateParams [CreateCertificateStatement vParent] +{ + Literal vPassword; + CertificateOption vOption; + CertificateOptionKinds encounteredOptions = CertificateOptionKinds.None; +} + : From certificateSource[vParent] + | ( + (vPassword = encryptClause + { + vParent.EncryptionPassword = vPassword; + } + )? + With vOption = certificateOption[encounteredOptions] + { + encounteredOptions = encounteredOptions | vOption.Kind; + AddAndUpdateTokenInfo(vParent, vParent.CertificateOptions,vOption); + } + (Comma vOption = certificateOption[encounteredOptions] + { + encounteredOptions = encounteredOptions | vOption.Kind; + AddAndUpdateTokenInfo(vParent, vParent.CertificateOptions,vOption); + } + )* + ) + ; + +createCertificateActivityFlag [CertificateStatementBase vParent] +{ + OptionState vOptionState; +} + : tActive:Identifier For tBeginDialog:Identifier EqualsSign vOptionState = optionOnOff[vParent] + { + Match(tActive,CodeGenerationSupporter.Active); + Match(tBeginDialog,CodeGenerationSupporter.BeginDialog); + vParent.ActiveForBeginDialog = vOptionState; + } + ; + +certificateOption [CertificateOptionKinds encountered]returns [CertificateOption vResult = FragmentFactory.CreateFragment()] +{ + Literal vValue; +} + : tOption:Identifier EqualsSign vValue = stringLiteral + { + vResult.Kind = CertificateOptionKindsHelper.Instance.ParseOption(tOption); + vResult.Value = vValue; + CheckCertificateOptionDupication(encountered,vResult.Kind,tOption); + } + ; + +certificateSource [CreateCertificateStatement vParent] +{ + EncryptionSource vCertificateSource; +} + : + ( + vCertificateSource=fileEncryptionSource + ( + // Greedy due to conflict with withCommonTableExpressionsAndXmlNamespaces + options {greedy = true; } : + With privateKeySpec[vParent] + )? 
+        |
+        vCertificateSource = assemblyEncryptionSource
+    )
+    {
+        vParent.CertificateSource = vCertificateSource;
+    }
+    ;
+
+encryptClause returns [Literal vResult] // parses ENCRYPTION BY PASSWORD = '<password>'; returns the password literal
+    : tEncryption:Identifier By tPassword:Identifier EqualsSign vResult = stringLiteral
+    {
+        Match(tEncryption,CodeGenerationSupporter.Encryption);
+        Match(tPassword,CodeGenerationSupporter.Password);
+    }
+    ;
+
+privateKeySpec [CertificateStatementBase vParent] // parses PRIVATE KEY ( <spec> [, <spec>]* )
+    : tPrivate:Identifier Key LeftParenthesis certificatePrivateKeySpec[vParent] (Comma certificatePrivateKeySpec[vParent])* tRParen:RightParenthesis
+    {
+        Match(tPrivate,CodeGenerationSupporter.Private);
+        UpdateTokenInfo(vParent,tRParen);
+    }
+    ;
+
+certificatePrivateKeySpec [CertificateStatementBase vParent] // one private-key element: password option or FILE = '<path>'
+{
+    Literal vFilePath;
+}
+    : passwordChangeOption[vParent]
+    | tFile:File EqualsSign vFilePath = stringLiteral
+    {
+        if (vParent.PrivateKeyPath != null) // FILE may appear at most once
+            throw GetUnexpectedTokenErrorException(tFile);
+        else
+            vParent.PrivateKeyPath = vFilePath;
+    }
+    ;
+
+passwordChangeOption [IPasswordChangeOption vParent] // ENCRYPTION|DECRYPTION BY PASSWORD = '<pwd>'; each may appear at most once
+{
+    Literal vPassword;
+}
+    : tEncryptionDecryption:Identifier By tPassword:Identifier EqualsSign vPassword = stringLiteral
+    {
+        if (TryMatch(tEncryptionDecryption,CodeGenerationSupporter.Encryption))
+        {
+            if (vParent.EncryptionPassword != null)
+                throw GetUnexpectedTokenErrorException(tEncryptionDecryption);
+            else
+                vParent.EncryptionPassword = vPassword;
+        }
+        else
+        {
+            Match(tEncryptionDecryption,CodeGenerationSupporter.Decryption);
+            if (vParent.DecryptionPassword != null)
+                throw GetUnexpectedTokenErrorException(tEncryptionDecryption);
+            else
+                vParent.DecryptionPassword = vPassword;
+        }
+    }
+    ;
+
+
+createContractStatement returns [CreateContractStatement vResult = FragmentFactory.CreateFragment()] // CREATE CONTRACT <name> [AUTHORIZATION ...] ( <message> [, <message>]* )
+{
+    Identifier vIdentifier;
+    ContractMessage vMessage;
+}
+    : tContract:Identifier vIdentifier=identifier
+    {
+        Match(tContract, CodeGenerationSupporter.Contract);
+        vResult.Name = vIdentifier;
+        ThrowPartialAstIfPhaseOne(vResult);
+    }
+    authorizationOpt[vResult]
+    LeftParenthesis vMessage = contractMessage
+    {
+        AddAndUpdateTokenInfo(vResult, vResult.Messages,vMessage);
+    }
+    (Comma vMessage = contractMessage
+    {
+        AddAndUpdateTokenInfo(vResult, vResult.Messages,vMessage);
+    }
+    )*
+    tRParen:RightParenthesis
+    {
+        UpdateTokenInfo(vResult,tRParen);
+    }
+    ;
+
+contractMessage returns [ContractMessage vResult = FragmentFactory.CreateFragment()] // <message-name> SENT BY ANY|INITIATOR|TARGET
+{
+    Identifier vMessageName;
+}
+    : vMessageName = identifier tSent:Identifier By
+    {
+        Match(tSent,CodeGenerationSupporter.Sent);
+        vResult.Name = vMessageName;
+    }
+    ( tAny:Any
+    {
+        vResult.SentBy = MessageSender.Any;
+        UpdateTokenInfo(vResult,tAny);
+    }
+    | tInitiatorTarget:Identifier
+    {
+        if (TryMatch(tInitiatorTarget,CodeGenerationSupporter.Initiator))
+            vResult.SentBy = MessageSender.Initiator;
+        else
+        {
+            Match(tInitiatorTarget,CodeGenerationSupporter.Target);
+            vResult.SentBy = MessageSender.Target;
+        }
+        UpdateTokenInfo(vResult,tInitiatorTarget);
+    }
+    )
+    ;
+
+createDatabaseScopedCredentialStatement returns [CreateCredentialStatement vResult = FragmentFactory.CreateFragment()] // CREATE DATABASE SCOPED CREDENTIAL ... (shares body with server-level credential)
+    : tScoped:Identifier
+    {
+        Match(tScoped, CodeGenerationSupporter.Scoped);
+        vResult.IsDatabaseScoped = true;
+    }
+
+    credentialStatementBody[vResult]
+
+    ;
+
+createCredentialStatement returns [CreateCredentialStatement vResult = FragmentFactory.CreateFragment()] // CREATE CREDENTIAL ... [FOR CRYPTOGRAPHIC PROVIDER <name>]
+{
+    Identifier vCryptographicProviderName;
+    vResult.IsDatabaseScoped = false;
+}
+    : credentialStatementBody[vResult]
+    (
+        For tCryptographic:Identifier tProvider:Identifier vCryptographicProviderName=identifier
+        {
+            Match(tCryptographic, CodeGenerationSupporter.Cryptographic);
+            Match(tProvider, CodeGenerationSupporter.Provider);
+            vResult.CryptographicProviderName = vCryptographicProviderName;
+        }
+    )?
+    ;
+
+credentialStatementBody [CredentialStatement vParent] // CREDENTIAL <name> WITH IDENTITY = '<id>' [, SECRET = '<secret>']
+{
+    Identifier vIdentifier;
+    Literal vLiteral;
+}
+    : tCredential:Identifier vIdentifier=identifier
+    {
+        Match(tCredential, CodeGenerationSupporter.Credential);
+        vParent.Name = vIdentifier;
+        ThrowPartialAstIfPhaseOne(vParent);
+    }
+    With Identity EqualsSign vLiteral = stringLiteral
+    {
+        vParent.Identity = vLiteral;
+    }
+    (Comma tSecret:Identifier EqualsSign vLiteral = stringLiteral
+    {
+        Match(tSecret,CodeGenerationSupporter.Secret);
+        vParent.Secret = vLiteral;
+    }
+    )?
+    ;
+
+createServerStatements returns [TSqlStatement vResult] // dispatch for CREATE SERVER AUDIT ... / CREATE SERVER ROLE ...
+    : tServer:Identifier
+    {
+        Match(tServer, CodeGenerationSupporter.Server);
+    }
+    (
+        {NextTokenMatches(CodeGenerationSupporter.Audit)}? // semantic predicate: lookahead on the non-reserved word
+        vResult=createServerAuditStatements
+        |
+        {NextTokenMatches(CodeGenerationSupporter.Role)}?
+        vResult=createServerRoleStatement
+    )
+    ;
+
+createServerAuditStatements returns [TSqlStatement vResult] // dispatch: SERVER AUDIT SPECIFICATION vs plain SERVER AUDIT
+    : tAudit:Identifier
+    {
+        Match(tAudit, CodeGenerationSupporter.Audit);
+    }
+    (
+        vResult = createServerAuditSpecificationStatement
+        |
+        vResult = createServerAuditStatement
+    )
+    ;
+
+createServerAuditStatement returns [CreateServerAuditStatement vResult = FragmentFactory.CreateFragment()] // CREATE SERVER AUDIT <name> TO <target> [WITH (...)] [WHERE <predicate>]
+{
+    Identifier vAuditName;
+    AuditTarget vTarget;
+    BooleanExpression vFilterPredicate;
+}
+    : vAuditName = identifier
+    {
+        vResult.AuditName = vAuditName;
+        ThrowPartialAstIfPhaseOne(vResult);
+    }
+    vTarget = auditTargetClause[true] // true: target path option is mandatory for CREATE
+    {
+        vResult.AuditTarget = vTarget;
+    }
+    ( // Greedy due to conflict with withCommonTableExpressionsAndXmlNamespaces
+        options {greedy = true; } :
+        auditCreateWithClause[vResult]
+    )?
+    (
+        Where vFilterPredicate=eventBooleanExpression
+        {
+            vResult.PredicateExpression = vFilterPredicate;
+        }
+    )?
+    ;
+
+auditTargetClause [bool pathRequired] returns [AuditTarget vResult = FragmentFactory.CreateFragment()] // TO FILE(...) | URL(...) | APPLICATION_LOG | SECURITY_LOG | EXTERNAL_MONITOR
+{
+    AuditTargetOption vOption;
+    bool pathOptionEncountered = false; // tracks whether FILEPATH/PATH was supplied, checked below when pathRequired
+}
+    : tTo:To
+    {
+        UpdateTokenInfo(vResult,tTo);
+    }
+    (
+        (
+            tFile:File LeftParenthesis vOption = auditFileOption
+            {
+                vResult.TargetKind = AuditTargetKind.File;
+                AddAndUpdateTokenInfo(vResult, vResult.TargetOptions, vOption);
+
+                pathOptionEncountered |= (vOption.OptionKind==AuditTargetOptionKind.FilePath);
+            }
+            (
+                Comma vOption = auditFileOption
+                {
+                    AddAndUpdateTokenInfo(vResult, vResult.TargetOptions, vOption);
+
+                    pathOptionEncountered |= vOption.OptionKind==AuditTargetOptionKind.FilePath;
+                }
+            )*
+            |
+            tUrl:Identifier LeftParenthesis vOption = auditUrlOption
+            {
+                Match(tUrl, CodeGenerationSupporter.Url);
+                vResult.TargetKind = AuditTargetKind.Url;
+                AddAndUpdateTokenInfo(vResult, vResult.TargetOptions, vOption);
+
+                pathOptionEncountered |= (vOption.OptionKind==AuditTargetOptionKind.Path);
+            }
+            (
+                Comma vOption = auditUrlOption
+                {
+                    AddAndUpdateTokenInfo(vResult, vResult.TargetOptions, vOption);
+
+                    pathOptionEncountered |= vOption.OptionKind==AuditTargetOptionKind.Path;
+                }
+            )? // NOTE(review): '?' (not '*') — URL targets appear limited to two options (PATH + RETENTION_DAYS); confirm intended
+        )
+        tRParen:RightParenthesis
+        {
+            UpdateTokenInfo(vResult,tRParen);
+            if (pathRequired && !pathOptionEncountered) // CREATE requires FILEPATH (file) or PATH (url)
+            {
+                if(tFile != null) // tFile/tUrl are null-initialized; non-null tells us which alternative matched
+                {
+                    ThrowParseErrorException("SQL46056", tFile, TSqlParserResource.SQL46056Message);
+                }
+                else
+                {
+                    ThrowParseErrorException("SQL46126", tUrl, TSqlParserResource.SQL46126Message);
+                }
+            }
+        }
+        |
+        tApplicationLogSecurityLogExternalMonitor:Identifier // bare-word targets with no option list
+        {
+            if (TryMatch(tApplicationLogSecurityLogExternalMonitor, CodeGenerationSupporter.ApplicationLog))
+            {
+                vResult.TargetKind = AuditTargetKind.ApplicationLog;
+            }
+            else if (TryMatch(tApplicationLogSecurityLogExternalMonitor, CodeGenerationSupporter.SecurityLog))
+            {
+                vResult.TargetKind = AuditTargetKind.SecurityLog;
+            }
+            else
+            {
+                Match(tApplicationLogSecurityLogExternalMonitor, CodeGenerationSupporter.ExternalMonitor);
+                vResult.TargetKind = AuditTargetKind.ExternalMonitor;
+            }
+            UpdateTokenInfo(vResult,tApplicationLogSecurityLogExternalMonitor);
+        }
+    )
+    ;
+
+// Corresponds to audit_file_option_element in SQL yacc grammar
+auditFileOption returns [AuditTargetOption vResult = null] // one FILE(...) option, selected by lookahead on the option name
+    :
+    {NextTokenMatches(CodeGenerationSupporter.MaxSize)}?
+    vResult = maxSizeAuditFileOption
+    |
+    {NextTokenMatches(CodeGenerationSupporter.MaxRolloverFiles)}?
+    vResult = maxRolloverFilesAuditFileOption
+    |
+    {NextTokenMatches(CodeGenerationSupporter.ReserveDiskSpace)}?
+    vResult = reserveDiskSpaceAuditFileOption
+    |
+    {NextTokenMatches(CodeGenerationSupporter.MaxFiles)}?
+    vResult = maxFilesAuditFileOption
+    |
+    vResult = pathAuditFileOption // default: FILEPATH/PATH
+    ;
+
+auditUrlOption returns [AuditTargetOption vResult = null] // one URL(...) option: RETENTION_DAYS or PATH
+    :
+    {NextTokenMatches(CodeGenerationSupporter.RetentionDays)}?
+    vResult = retentionDaysAuditUrlOption
+    |
+    vResult = pathAuditFileOption
+    ;
+
+maxSizeAuditFileOption returns [MaxSizeAuditTargetOption vResult = FragmentFactory.CreateFragment()] // MAXSIZE = <n> MB|GB|TB | UNLIMITED
+{
+    Literal vSize;
+}
+    : tOption:Identifier EqualsSign
+    {
+        Match(tOption, CodeGenerationSupporter.MaxSize);
+        vResult.OptionKind=AuditTargetOptionKind.MaxSize;
+        UpdateTokenInfo(vResult, tOption);
+    }
+    (
+        vSize = integer tUnit:Identifier
+        {
+            vResult.Size = vSize;
+
+            if (TryMatch(tUnit, CodeGenerationSupporter.GB))
+            {
+                vResult.Unit = MemoryUnit.GB;
+                ThrowIfTooLargeAuditFileSize(vSize, 10); // second arg presumably a bit-shift to bytes — confirm in helper
+            }
+            else if (TryMatch(tUnit, CodeGenerationSupporter.TB))
+            {
+                vResult.Unit = MemoryUnit.TB;
+                ThrowIfTooLargeAuditFileSize(vSize, 20);
+            }
+            else
+            {
+                Match(tUnit, CodeGenerationSupporter.MB);
+                vResult.Unit = MemoryUnit.MB;
+                ThrowIfTooLargeAuditFileSize(vSize, 0);
+            }
+
+            UpdateTokenInfo(vResult, tUnit);
+        }
+        |
+        tUnlimited:Identifier
+        {
+            Match(tUnlimited, CodeGenerationSupporter.Unlimited);
+            vResult.IsUnlimited = true;
+            vResult.Size = null;
+            vResult.Unit = MemoryUnit.Unspecified;
+        }
+    )
+    ;
+
+retentionDaysAuditUrlOption returns [RetentionDaysAuditTargetOption vResult = FragmentFactory.CreateFragment()] // RETENTION_DAYS = <n>
+{
+    Literal vDays;
+}
+    : tRetentionDays:Identifier EqualsSign
+    {
+        Match(tRetentionDays, CodeGenerationSupporter.RetentionDays);
+        vResult.OptionKind=AuditTargetOptionKind.RetentionDays;
+        UpdateTokenInfo(vResult, tRetentionDays);
+    }
+    (
+        vDays = integer
+        {
+            vResult.Days = vDays;
+        }
+    )
+    ;
+
+maxRolloverFilesAuditFileOption returns [MaxRolloverFilesAuditTargetOption vResult = FragmentFactory.CreateFragment()] // MAX_ROLLOVER_FILES = <n> | UNLIMITED
+{
+    Literal vValue;
+}
+    : tOption:Identifier EqualsSign
+    {
+        Match(tOption, CodeGenerationSupporter.MaxRolloverFiles);
+        vResult.OptionKind=AuditTargetOptionKind.MaxRolloverFiles;
+        UpdateTokenInfo(vResult, tOption);
+    }
+    (
+        vValue = integer
+        {
+            vResult.Value = vValue;
+        }
+        |
+        tUnlimited:Identifier
+        {
+            Match(tUnlimited, 
CodeGenerationSupporter.Unlimited);
+            vResult.IsUnlimited = true;
+            UpdateTokenInfo(vResult, tUnlimited);
+        }
+    )
+    ;
+
+maxFilesAuditFileOption returns [LiteralAuditTargetOption vResult = FragmentFactory.CreateFragment()] // MAX_FILES = <n>
+{
+    Literal vValue;
+}
+    : tOption:Identifier EqualsSign vValue = integer
+    {
+        Match(tOption, CodeGenerationSupporter.MaxFiles);
+        vResult.OptionKind=AuditTargetOptionKind.MaxFiles;
+        UpdateTokenInfo(vResult, tOption);
+        vResult.Value = vValue;
+    }
+    ;
+
+reserveDiskSpaceAuditFileOption returns [OnOffAuditTargetOption vResult = FragmentFactory.CreateFragment()] // RESERVE_DISK_SPACE = ON|OFF
+{
+    OptionState vValue;
+}
+    : tOption:Identifier EqualsSign vValue = optionOnOff[vResult]
+    {
+        Match(tOption, CodeGenerationSupporter.ReserveDiskSpace);
+        vResult.OptionKind=AuditTargetOptionKind.ReserveDiskSpace;
+        UpdateTokenInfo(vResult, tOption);
+        vResult.Value = vValue;
+    }
+    ;
+
+pathAuditFileOption returns [LiteralAuditTargetOption vResult = FragmentFactory.CreateFragment()] // FILEPATH = '<path>' (file target) or PATH = '<url>' (url target)
+{
+    Literal vValue;
+}
+    : tOption:Identifier EqualsSign vValue = stringLiteral
+    {
+        if(TryMatch(tOption, CodeGenerationSupporter.FilePath))
+        {
+            vResult.OptionKind=AuditTargetOptionKind.FilePath;
+        }
+        else
+        {
+            Match(tOption, CodeGenerationSupporter.Path);
+            vResult.OptionKind=AuditTargetOptionKind.Path;
+        }
+        UpdateTokenInfo(vResult, tOption);
+        vResult.Value = vValue;
+    }
+    ;
+
+auditCreateWithClause [ServerAuditStatement vParent] // WITH ( <create-option> [, ...] ) — CREATE-only option set
+{
+    AuditOption vOption;
+}
+    : With LeftParenthesis vOption = auditCreateOption
+    {
+        AddAndUpdateTokenInfo(vParent, vParent.Options, vOption);
+    }
+    (Comma vOption = auditCreateOption
+    {
+        AddAndUpdateTokenInfo(vParent, vParent.Options, vOption);
+    }
+    )*
+    tRParen:RightParenthesis
+    {
+        UpdateTokenInfo(vParent, tRParen);
+    }
+    ;
+
+auditWithClause [ServerAuditStatement vParent] // WITH ( <option> [, ...] ) — ALTER variant (adds STATE)
+{
+    AuditOption vOption;
+}
+    : With LeftParenthesis vOption = auditOption
+    {
+        
AddAndUpdateTokenInfo(vParent, vParent.Options, vOption);
+    }
+    (Comma vOption = auditOption
+    {
+        AddAndUpdateTokenInfo(vParent, vParent.Options, vOption);
+    }
+    )*
+    tRParen:RightParenthesis
+    {
+        UpdateTokenInfo(vParent, tRParen);
+    }
+    ;
+
+// Corresponds to audit_create_option_element in SQL yacc
+auditCreateOption returns [AuditOption vResult] // option name validated inside each sub-rule via Match on tOption
+    :
+    tOption:Identifier EqualsSign
+    (
+        vResult = queueDelayAuditOption[tOption]
+        |
+        vResult = onFailureAuditOption[tOption]
+        |
+        vResult = auditGuidAuditOption[tOption]
+        |
+        vResult = operatorAuditOption[tOption]
+    )
+    ;
+
+// Corresponds to audit_option_element in SQL yacc
+auditOption returns [AuditOption vResult] // ALTER option set: QUEUE_DELAY, ON_FAILURE, OPERATOR_AUDIT, STATE
+    :
+    tOption:Identifier EqualsSign
+    (
+        vResult = queueDelayAuditOption[tOption]
+        |
+        vResult = onFailureAuditOption[tOption]
+        |
+        {TryMatch(tOption, CodeGenerationSupporter.OperatorAudit)}? // predicate disambiguates the two ON/OFF-valued options
+        vResult = operatorAuditOption[tOption]
+        |
+        {TryMatch(tOption, CodeGenerationSupporter.State)}?
+        vResult = stateAuditOption[tOption]
+    )
+    ;
+
+queueDelayAuditOption [IToken tOption] returns [QueueDelayAuditOption vResult = FragmentFactory.CreateFragment()] // QUEUE_DELAY = <n>
+{
+    Literal vValue;
+}
+    : vValue = integer
+    {
+        Match(tOption, CodeGenerationSupporter.QueueDelay);
+        vResult.OptionKind=AuditOptionKind.QueueDelay;
+        UpdateTokenInfo(vResult,tOption);
+        vResult.Delay = vValue;
+    }
+    ;
+
+onFailureAuditOption [IToken tOption] returns [OnFailureAuditOption vResult = FragmentFactory.CreateFragment()] // ON_FAILURE = CONTINUE | SHUTDOWN | FAIL_OPERATION
+    :
+    {
+        Match(tOption, CodeGenerationSupporter.OnFailure);
+        UpdateTokenInfo(vResult,tOption);
+        vResult.OptionKind=AuditOptionKind.OnFailure;
+    }
+    (
+        tContinue:Continue
+        {
+            UpdateTokenInfo(vResult,tContinue);
+            vResult.OnFailureAction = AuditFailureActionType.Continue;
+        }
+        |
+        tShutdown:Shutdown
+        {
+            UpdateTokenInfo(vResult,tShutdown);
+            vResult.OnFailureAction = AuditFailureActionType.Shutdown;
+        }
+        |
+        tIdentifier:Identifier
+        {
+            Match(tIdentifier, CodeGenerationSupporter.FailOperation);
+            UpdateTokenInfo(vResult, tIdentifier);
+            vResult.OnFailureAction = AuditFailureActionType.FailOperation;
+        }
+    )
+    ;
+
+auditGuidAuditOption [IToken tOption] returns [AuditGuidAuditOption vResult = FragmentFactory.CreateFragment()] // AUDIT_GUID = '<guid-string>'
+{
+    Literal vValue;
+}
+    : vValue = stringLiteral
+    {
+        Match(tOption, CodeGenerationSupporter.AuditGuid);
+        ThrowIfWrongGuidFormat(vValue); // literal must parse as a GUID
+        vResult.OptionKind=AuditOptionKind.AuditGuid;
+        UpdateTokenInfo(vResult,tOption);
+        vResult.Guid = vValue;
+    }
+    ;
+
+operatorAuditOption [IToken tOption] returns [OperatorAuditOption vResult = FragmentFactory.CreateFragment()] // OPERATOR_AUDIT = ON|OFF
+{
+    OptionState vValue;
+}
+    : vValue = optionOnOff[vResult]
+    {
+        Match(tOption, CodeGenerationSupporter.OperatorAudit);
+        vResult.OptionKind=AuditOptionKind.OperatorAudit;
+        UpdateTokenInfo(vResult,tOption);
+        vResult.Value = vValue;
+    }
+    ;
+
+stateAuditOption [IToken tOption] returns [StateAuditOption vResult = FragmentFactory.CreateFragment()] // STATE = ON|OFF
+{
+    OptionState vValue;
+}
+    : vValue = optionOnOff[vResult]
+    {
+        Match(tOption, CodeGenerationSupporter.State);
+        vResult.OptionKind=AuditOptionKind.State;
+        UpdateTokenInfo(vResult,tOption);
+        vResult.Value = vValue;
+    }
+    ;
+
+createServerAuditSpecificationStatement returns [CreateServerAuditSpecificationStatement vResult = FragmentFactory.CreateFragment()] // CREATE SERVER AUDIT SPECIFICATION <name> FOR SERVER AUDIT ... [ADD (...)...] [WITH (STATE=...)]
+{
+    Identifier vAuditSpecName;
+    AuditSpecificationPart vPart;
+}
+    : tSpecification:Identifier vAuditSpecName = identifier
+    {
+        Match(tSpecification, CodeGenerationSupporter.Specification);
+        vResult.SpecificationName = vAuditSpecName;
+        ThrowPartialAstIfPhaseOne(vResult);
+    }
+    auditSpecificationForClause[vResult]
+    ( // Conflicts with Add SIGNATURE (but it actually shouldn't, k=2 should be enough)
+        (Add LeftParenthesis) => // syntactic predicate resolves the conflict
+        vPart = createAuditSpecificationDetail
+        {
+            AddAndUpdateTokenInfo(vResult, vResult.Parts, vPart);
+        }
+        (Comma vPart = createAuditSpecificationDetail
+        {
+            AddAndUpdateTokenInfo(vResult, vResult.Parts, vPart);
+        }
+        )*
+    )?
+    auditSpecificationStateOpt[vResult]
+    ;
+
+alterServerStatements returns [TSqlStatement vResult] // dispatch for ALTER SERVER AUDIT/CONFIGURATION/ROLE
+    :
+    tServer:Identifier
+    {
+        Match(tServer, CodeGenerationSupporter.Server);
+    }
+    (
+        {NextTokenMatches(CodeGenerationSupporter.Audit)}?
+        vResult = alterServerAuditStatements
+        |
+        {NextTokenMatches(CodeGenerationSupporter.Configuration)}?
+        vResult = alterServerConfigurationStatement
+        |
+        {NextTokenMatches(CodeGenerationSupporter.Role)}?
+        vResult = alterServerRoleStatement
+    )
+    ;
+
+alterServerAuditStatements returns [TSqlStatement vResult] // dispatch: ALTER SERVER AUDIT SPECIFICATION vs plain ALTER SERVER AUDIT
+    : tAudit:Identifier
+    {
+        Match(tAudit, CodeGenerationSupporter.Audit);
+    }
+    (
+        {NextTokenMatches(CodeGenerationSupporter.Specification)}?
+        vResult = alterServerAuditSpecificationStatement
+        |
+        vResult = alterServerAuditStatement
+    )
+    ;
+
+alterServerAuditSpecificationStatement returns [AlterServerAuditSpecificationStatement vResult = FragmentFactory.CreateFragment()] // ALTER ... SPECIFICATION <name> [FOR SERVER AUDIT ...] [ADD/DROP (...)...] [WITH (STATE=...)]
+{
+    Identifier vAuditSpecName;
+    AuditSpecificationPart vPart;
+}
+    : tSpecification:Identifier vAuditSpecName = identifier
+    {
+        Match(tSpecification, CodeGenerationSupporter.Specification);
+        vResult.SpecificationName = vAuditSpecName;
+        ThrowPartialAstIfPhaseOne(vResult);
+    }
+    (auditSpecificationForClause[vResult])? // FOR clause optional in ALTER, unlike CREATE
+    ( // Conflicts with Add SIGNATURE and Drop statements
+        ((Add|Drop) LeftParenthesis) =>
+        vPart = auditSpecificationDetail
+        {
+            AddAndUpdateTokenInfo(vResult, vResult.Parts, vPart);
+        }
+        (Comma vPart = auditSpecificationDetail
+        {
+            AddAndUpdateTokenInfo(vResult, vResult.Parts, vPart);
+        }
+        )*
+    )?
+    auditSpecificationStateOpt[vResult]
+    ;
+
+alterServerAuditStatement returns [AlterServerAuditStatement vResult = FragmentFactory.CreateFragment()] // ALTER SERVER AUDIT <name>: MODIFY NAME, target/WITH/WHERE changes, or REMOVE WHERE
+{
+    Identifier vAuditName;
+    Identifier vNewName;
+    AuditTarget vTarget = null;
+    BooleanExpression vFilterPredicate = null;
+}
+    : vAuditName = identifier
+    {
+        vResult.AuditName = vAuditName;
+        ThrowPartialAstIfPhaseOne(vResult);
+    }
+    (
+        {NextTokenMatches(CodeGenerationSupporter.Modify)}?
+        tModify:Identifier tName:Identifier EqualsSign vNewName = identifier
+        {
+            Match(tModify, CodeGenerationSupporter.Modify);
+            Match(tName, CodeGenerationSupporter.Name);
+            vResult.NewName = vNewName;
+        }
+        |
+        (
+            vTarget = auditTargetClause[false] // false: path option not required when altering
+            {
+                vResult.AuditTarget = vTarget;
+            }
+        )?
+        ( // Greedy due to conflict with withCommonTableExpressionsAndXmlNamespaces
+            options {greedy = true; } :
+            auditWithClause[vResult]
+        )?
+        (
+            Where vFilterPredicate=eventBooleanExpression
+            {
+                vResult.PredicateExpression = vFilterPredicate;
+            }
+        )?
+        {
+            if(vTarget == null && (vResult.Options == null || vResult.Options.Count == 0) && vFilterPredicate == null) // all three clauses optional, but at least one must be present
+            {
+                ThrowIncorrectSyntaxErrorException(vAuditName);
+            }
+        }
+        |
+        tRemove:Identifier tWhere:Where
+        {
+            Match(tRemove, CodeGenerationSupporter.Remove);
+            UpdateTokenInfo(vResult, tWhere);
+            vResult.RemoveWhere=true;
+        }
+    )
+    ;
+
+alterServerConfigurationStatement returns [TSqlStatement vResult] // ALTER SERVER CONFIGURATION SET <area>, dispatched by lookahead
+    :
+    tConfiguration:Identifier Set
+    {
+        Match(tConfiguration, CodeGenerationSupporter.Configuration);
+    }
+    (
+        {NextTokenMatches(CodeGenerationSupporter.Process)}?
+        vResult = alterServerConfigurationSetProcessAffinityStatement
+        |
+        {NextTokenMatches(CodeGenerationSupporter.Buffer)}?
+        vResult = alterServerConfigurationSetBufferPoolExtensionStatement
+        |
+        {NextTokenMatches(CodeGenerationSupporter.Diagnostics)}?
+        vResult = alterServerConfigurationSetDiagnosticsLogStatement
+        |
+        {NextTokenMatches(CodeGenerationSupporter.Failover)}?
+        vResult = alterServerConfigurationSetFailoverClusterPropertyStatement
+        |
+        {NextTokenMatches(CodeGenerationSupporter.Hadr)}?
+        vResult = alterServerConfigurationSetHadrClusterStatement
+        |
+        {NextTokenMatches(CodeGenerationSupporter.SoftNuma)}?
+        vResult = alterServerConfigurationSetSoftNumaStatement
+        |
+        {NextTokenMatches(CodeGenerationSupporter.External)}?
+        vResult = alterServerConfigurationSetExternalAuthenticationStatement
+    )
+    ;
+
+
+alterServerConfigurationSetExternalAuthenticationStatement returns [AlterServerConfigurationSetExternalAuthenticationStatement vResult = FragmentFactory.CreateFragment()] // SET EXTERNAL AUTHENTICATION ON (...) | OFF
+{
+    AlterServerConfigurationExternalAuthenticationOption vOption;
+}
+    : tExternal:External tAuthentication:Identifier
+    {
+        Match(tExternal, CodeGenerationSupporter.External);
+        Match(tAuthentication, CodeGenerationSupporter.Authentication);
+    }
+    vOption=alterServerConfigurationExternalAuthenticationContainerOption
+    {
+        AddAndUpdateTokenInfo(vResult, vResult.Options, vOption);
+    }
+    ;
+
+alterServerConfigurationExternalAuthenticationContainerOption returns [AlterServerConfigurationExternalAuthenticationContainerOption vResult = FragmentFactory.CreateFragment()] // ON (suboption) — suboptions only legal with ON
+{
+    OnOffOptionValue vOptionValue;
+    AlterServerConfigurationExternalAuthenticationOption vAlterServerConfigurationExternalAuthenticationOption;
+}
+    : vOptionValue=onOffOptionValue
+    {
+        vResult.OptionValue = vOptionValue;
+        vResult.OptionKind = AlterServerConfigurationExternalAuthenticationOptionKind.OnOff;
+    }
+    (
+        {
+            // Additional options are only allowed when external authentication is set to ON
+            if (vOptionValue.OptionState != OptionState.On)
+                ThrowIncorrectSyntaxErrorException(vOptionValue);
+        }
+        LeftParenthesis
+        (vAlterServerConfigurationExternalAuthenticationOption=alterServerConfigurationExternalAuthenticationOption
+        {
+            AddAndUpdateTokenInfo(vResult, vResult.Suboptions, vAlterServerConfigurationExternalAuthenticationOption);
+        })
+        RightParenthesis
+        |
+        {
+            // Empty rule: 
setting EXTERNAL AUTHENTICATION to OFF is the only case where no suboptions are allowed
+            if (vOptionValue.OptionState != OptionState.Off)
+                ThrowIncorrectSyntaxErrorException(vOptionValue);
+        }
+    )
+    ;
+
+alterServerConfigurationExternalAuthenticationOption returns [AlterServerConfigurationExternalAuthenticationOption vResult] // dispatch: USE IDENTITY vs CREDENTIAL_NAME
+    : {NextTokenMatches(CodeGenerationSupporter.UseIdentity)}?
+    vResult = alterServerConfigurationExternalAuthenticationUseIdentityOption
+    |
+    {NextTokenMatches(CodeGenerationSupporter.CredentialName)}?
+    vResult = alterServerConfigurationExternalAuthenticationCredentialNameOption
+    ;
+
+alterServerConfigurationExternalAuthenticationCredentialNameOption returns [AlterServerConfigurationExternalAuthenticationOption vResult = FragmentFactory.CreateFragment()] // CREDENTIAL_NAME = '<name>'
+{
+    LiteralOptionValue vCredentialName;
+}
+    : tCredentialName:Identifier EqualsSign vCredentialName=stringLiteralOptionValue
+    {
+        Match(tCredentialName, CodeGenerationSupporter.CredentialName);
+        vResult.OptionKind = AlterServerConfigurationExternalAuthenticationOptionHelper.Instance.ParseOption(tCredentialName);
+        vResult.OptionValue = vCredentialName;
+    }
+    ;
+
+alterServerConfigurationExternalAuthenticationUseIdentityOption returns [AlterServerConfigurationExternalAuthenticationOption vResult = FragmentFactory.CreateFragment()] // USE IDENTITY bare keyword option (no value)
+{
+}
+    : tUseIdentity:Identifier
+    {
+        Match(tUseIdentity, CodeGenerationSupporter.UseIdentity);
+        vResult.OptionKind = AlterServerConfigurationExternalAuthenticationOptionHelper.Instance.ParseOption(tUseIdentity);
+    }
+    ;
+
+alterServerConfigurationSetSoftNumaStatement returns [AlterServerConfigurationSetSoftNumaStatement vResult = FragmentFactory.CreateFragment()] // SET SOFTNUMA ON|OFF
+{
+    AlterServerConfigurationSoftNumaOption vOption;
+}
+    : tSoftNuma:Identifier
+    {
+        Match(tSoftNuma, CodeGenerationSupporter.SoftNuma);
+    }
+    vOption=alterServerConfigurationSoftNumaOption
+    {
+        AddAndUpdateTokenInfo(vResult, vResult.Options, vOption);
+    }
+    ;
+
+alterServerConfigurationSoftNumaOption returns 
[AlterServerConfigurationSoftNumaOption vResult = FragmentFactory.CreateFragment()] // the single ON|OFF value for SOFTNUMA
+{
+    OnOffOptionValue vOptionValue;
+}
+    : vOptionValue=onOffOptionValue
+    {
+        vResult.OptionKind = AlterServerConfigurationSoftNumaOptionKind.OnOff;
+        vResult.OptionValue = vOptionValue;
+    }
+
+    ;
+
+alterServerConfigurationSetBufferPoolExtensionStatement returns [AlterServerConfigurationSetBufferPoolExtensionStatement vResult = FragmentFactory.CreateFragment()] // SET BUFFER POOL EXTENSION ON (FILENAME=..., SIZE=...) | OFF
+{
+    AlterServerConfigurationBufferPoolExtensionOption vOption;
+}
+    : tBuffer:Identifier tPool:Identifier tExtension:Identifier
+    {
+        Match(tBuffer, CodeGenerationSupporter.Buffer);
+        Match(tPool, CodeGenerationSupporter.Pool);
+        Match(tExtension, CodeGenerationSupporter.Extension);
+    }
+    vOption=alterServerConfigurationBufferPoolExtensionContainerOption
+    {
+        AddAndUpdateTokenInfo(vResult, vResult.Options, vOption);
+    }
+    ;
+
+alterServerConfigurationBufferPoolExtensionContainerOption returns [AlterServerConfigurationBufferPoolExtensionContainerOption vResult = FragmentFactory.CreateFragment()] // ON requires exactly (FILENAME, SIZE); OFF takes nothing
+{
+    OnOffOptionValue vOptionValue;
+    AlterServerConfigurationBufferPoolExtensionOption vFileNameSuboption;
+    AlterServerConfigurationBufferPoolExtensionOption vSizeSuboption;
+}
+    : vOptionValue=onOffOptionValue
+    {
+        vResult.OptionValue = vOptionValue;
+        vResult.OptionKind = AlterServerConfigurationBufferPoolExtensionOptionKind.OnOff;
+    }
+    (
+        {
+            // Additional options are only allowed when buffer pool extension is set to ON
+            if (vOptionValue.OptionState != OptionState.On)
+                ThrowIncorrectSyntaxErrorException(vOptionValue);
+        }
+        tLParen:LeftParenthesis vFileNameSuboption=alterServerConfigurationBufferPoolExtensionFileNameOption
+        {
+            UpdateTokenInfo(vResult, tLParen);
+            AddAndUpdateTokenInfo(vResult, vResult.Suboptions, vFileNameSuboption);
+        }
+        tComma:Comma vSizeSuboption=alterServerConfigurationBufferPoolExtensionSizeOption tRParen:RightParenthesis
+        {
+            AddAndUpdateTokenInfo(vResult, vResult.Suboptions, vSizeSuboption);
+            
UpdateTokenInfo(vResult, tRParen);
+        }
+        |
+        {
+            // Empty rule: setting buffer pool extension to OFF is the only case where no suboptions are allowed
+            if (vOptionValue.OptionState != OptionState.Off)
+                ThrowIncorrectSyntaxErrorException(vOptionValue);
+        }
+    )
+    ;
+
+alterServerConfigurationBufferPoolExtensionFileNameOption returns [AlterServerConfigurationBufferPoolExtensionOption vResult = FragmentFactory.CreateFragment()] // FILENAME = '<path>'
+{
+    LiteralOptionValue vFileName;
+}
+    : tFileName:Identifier EqualsSign vFileName=stringLiteralOptionValue
+    {
+        Match(tFileName, CodeGenerationSupporter.FileName);
+        vResult.OptionKind = AlterServerConfigurationBufferPoolExtensionOptionHelper.Instance.ParseOption(tFileName);
+        vResult.OptionValue = vFileName;
+    }
+    ;
+
+alterServerConfigurationBufferPoolExtensionSizeOption returns [AlterServerConfigurationBufferPoolExtensionSizeOption vResult = FragmentFactory.CreateFragment()] // SIZE = <n> KB|MB|GB
+{
+    LiteralOptionValue vSize;
+    MemoryUnit vMemUnit;
+}
+    : tSize:Identifier EqualsSign vSize=integerLiteralOptionValue vMemUnit=memUnit[vResult]
+    {
+        Match(tSize, CodeGenerationSupporter.Size);
+
+        if (vMemUnit != MemoryUnit.KB && vMemUnit != MemoryUnit.MB && vMemUnit != MemoryUnit.GB) // memUnit accepts more units than SIZE allows
+            ThrowIncorrectSyntaxErrorException(vSize);
+
+        vResult.OptionKind = AlterServerConfigurationBufferPoolExtensionOptionHelper.Instance.ParseOption(tSize);
+        vResult.OptionValue = vSize;
+        vResult.SizeUnit = vMemUnit;
+    }
+    ;
+
+alterServerConfigurationSetDiagnosticsLogStatement returns [AlterServerConfigurationSetDiagnosticsLogStatement vResult = FragmentFactory.CreateFragment()] // SET DIAGNOSTICS LOG <option>
+{
+    AlterServerConfigurationDiagnosticsLogOption vOption;
+}
+    : tDiagnostics:Identifier tLog:Identifier
+    {
+        Match(tDiagnostics, CodeGenerationSupporter.Diagnostics);
+        Match(tLog, CodeGenerationSupporter.Log);
+    }
+    vOption=alterServerConfigurationDiagnosticsLogOption
+    {
+        AddAndUpdateTokenInfo(vResult, vResult.Options, vOption);
+    }
+    ;
+
+alterServerConfigurationDiagnosticsLogOption returns 
[AlterServerConfigurationDiagnosticsLogOption vResult = FragmentFactory.CreateFragment()] // ON|OFF, MAX_SIZE, PATH, or MAX_FILES for DIAGNOSTICS LOG
+{
+    OptionValue vOptionValue;
+}
+    : vOptionValue=onOffOptionValue
+    {
+        vResult.OptionKind = AlterServerConfigurationDiagnosticsLogOptionKind.OnOff;
+        vResult.OptionValue = vOptionValue;
+    }
+    |
+    {NextTokenMatches(CodeGenerationSupporter.MaxUnderscoreSize)}? // MAX_SIZE has its own rule because of the MB/DEFAULT value forms
+    vResult=alterServerConfigurationDiagnosticsLogMaxSizeOption
+    |
+    tLogOption:Identifier EqualsSign
+    {
+        vResult.OptionKind = AlterServerConfigurationDiagnosticsLogOptionHelper.Instance.ParseOption(tLogOption);
+    }
+    (
+        {vResult.OptionKind == AlterServerConfigurationDiagnosticsLogOptionKind.Path}? // value form depends on which option was parsed
+        vOptionValue=stringOrDefaultLiteralOptionValue
+        {
+            vResult.OptionValue = vOptionValue;
+        }
+        |
+        {vResult.OptionKind == AlterServerConfigurationDiagnosticsLogOptionKind.MaxFiles}?
+        vOptionValue=integerOrDefaultLiteralOptionValue
+        {
+            vResult.OptionValue = vOptionValue;
+        }
+    )
+    ;
+
+alterServerConfigurationDiagnosticsLogMaxSizeOption returns [AlterServerConfigurationDiagnosticsLogMaxSizeOption vResult = FragmentFactory.CreateFragment()] // MAX_SIZE = <n> MB | DEFAULT
+{
+    OptionValue vOptionValue;
+}
+    : tMaxSize:Identifier EqualsSign
+    {
+        vResult.OptionKind = AlterServerConfigurationDiagnosticsLogOptionHelper.Instance.ParseOption(tMaxSize);
+        if (vResult.OptionKind != AlterServerConfigurationDiagnosticsLogOptionKind.MaxSize)
+            ThrowIncorrectSyntaxErrorException(tMaxSize);
+    }
+    (
+        vOptionValue=integerLiteralOptionValue tMB:Identifier
+        {
+            Match(tMB, CodeGenerationSupporter.MB); // MB is the only unit accepted here
+            vResult.OptionValue = vOptionValue;
+            vResult.SizeUnit = MemoryUnit.MB;
+            UpdateTokenInfo(vResult, tMB);
+        }
+        |
+        vOptionValue=defaultLiteralOptionValue
+        {
+            vResult.OptionValue = vOptionValue;
+        }
+    )
+    ;
+
+alterServerConfigurationSetFailoverClusterPropertyStatement returns [AlterServerConfigurationSetFailoverClusterPropertyStatement vResult = FragmentFactory.CreateFragment()] // SET FAILOVER CLUSTER PROPERTY <option>
+{
+    AlterServerConfigurationFailoverClusterPropertyOption vOption;
+}
+    : 
tFailover:Identifier tCluster:Identifier tProperty:Identifier
+    {
+        Match(tFailover, CodeGenerationSupporter.Failover);
+        Match(tCluster, CodeGenerationSupporter.Cluster);
+        Match(tProperty, CodeGenerationSupporter.Property);
+    }
+    vOption=alterServerConfigurationFailoverClusterPropertyOption
+    {
+        AddAndUpdateTokenInfo(vResult, vResult.Options, vOption);
+    }
+    ;
+
+alterServerConfigurationFailoverClusterPropertyOption returns [AlterServerConfigurationFailoverClusterPropertyOption vResult = FragmentFactory.CreateFragment()] // <property> = <value>; value form depends on the property kind
+{
+    OptionValue vOptionValue;
+}
+    : tProperty:Identifier EqualsSign
+    {
+        vResult.OptionKind = AlterServerConfigurationFailoverClusterPropertyOptionHelper.Instance.ParseOption(tProperty);
+    }
+    (
+        {vResult.OptionKind == AlterServerConfigurationFailoverClusterPropertyOptionKind.SqlDumperDumpFlags}? // flags take a binary (0x...) or DEFAULT value
+        vOptionValue=binaryOrDefaultLiteralOptionValue
+        {
+            vResult.OptionValue = vOptionValue;
+        }
+        |
+        {vResult.OptionKind == AlterServerConfigurationFailoverClusterPropertyOptionKind.SqlDumperDumpPath}? 
+        vOptionValue=stringOrDefaultLiteralOptionValue
+        {
+            vResult.OptionValue = vOptionValue;
+        }
+        |
+        vOptionValue=integerOrDefaultLiteralOptionValue // all remaining properties take integer or DEFAULT
+        {
+            vResult.OptionValue = vOptionValue;
+        }
+    )
+    ;
+
+alterServerConfigurationSetHadrClusterStatement returns [AlterServerConfigurationSetHadrClusterStatement vResult = FragmentFactory.CreateFragment()] // SET HADR CLUSTER CONTEXT = '<name>' | LOCAL
+{
+    AlterServerConfigurationHadrClusterOption vOption;
+}
+    : tHadr:Identifier tCluster:Identifier
+    {
+        Match(tHadr, CodeGenerationSupporter.Hadr);
+        Match(tCluster, CodeGenerationSupporter.Cluster);
+    }
+    vOption=alterServerConfigurationHadrClusterOption
+    {
+        AddAndUpdateTokenInfo(vResult, vResult.Options, vOption);
+    }
+    ;
+
+alterServerConfigurationHadrClusterOption returns [AlterServerConfigurationHadrClusterOption vResult = FragmentFactory.CreateFragment()] // <option> = '<string>' | LOCAL
+{
+    OptionValue vOptionValue;
+}
+    : tOptionKind:Identifier EqualsSign
+    {
+        vResult.OptionKind = AlterServerConfigurationHadrClusterOptionHelper.Instance.ParseOption(tOptionKind);
+    }
+    (
+        vOptionValue=stringLiteralOptionValue
+        {
+            vResult.OptionValue = vOptionValue;
+        }
+        |
+        tLocal:Identifier
+        {
+            Match(tLocal, CodeGenerationSupporter.Local);
+            vResult.IsLocal = true;
+            UpdateTokenInfo(vResult, tLocal);
+        }
+    )
+    ;
+
+alterServerConfigurationSetProcessAffinityStatement returns [AlterServerConfigurationStatement vResult = FragmentFactory.CreateFragment()] // SET PROCESS AFFINITY CPU|NUMANODE = <range-list> | CPU = AUTO
+    : tProcess:Identifier tAffinity:Identifier
+    {
+        Match(tProcess, CodeGenerationSupporter.Process);
+        Match(tAffinity, CodeGenerationSupporter.Affinity);
+    }
+    tCpuOrNumanode:Identifier EqualsSign
+    (
+        affinityRangeList[vResult]
+        {
+            if (TryMatch(tCpuOrNumanode, CodeGenerationSupporter.Cpu))
+            {
+                vResult.ProcessAffinity = ProcessAffinityType.Cpu;
+            }
+            else
+            {
+                Match(tCpuOrNumanode, CodeGenerationSupporter.NumaNode);
+                vResult.ProcessAffinity = ProcessAffinityType.NumaNode;
+            }
+        }
+        |
+        tAuto:Identifier
+        {
+            // AUTO implies CPU affinity
+            Match(tCpuOrNumanode, CodeGenerationSupporter.Cpu);
+            
                Match(tAuto, CodeGenerationSupporter.Auto);

                vResult.ProcessAffinity = ProcessAffinityType.CpuAuto;

                UpdateTokenInfo(vResult, tAuto);
            }
        )
    ;

// Comma-separated list of one or more affinity ranges; each range is
// appended to the parent statement's ProcessAffinityRanges collection.
affinityRangeList [AlterServerConfigurationStatement vParent]
{
    ProcessAffinityRange vAffinityRange;
}
    : vAffinityRange = affinityRange
        {
            AddAndUpdateTokenInfo(vParent, vParent.ProcessAffinityRanges, vAffinityRange);
        }
        (Comma vAffinityRange = affinityRange
            {
                AddAndUpdateTokenInfo(vParent, vParent.ProcessAffinityRanges, vAffinityRange);
            }
        )*
    ;

// A single affinity range: <int> [TO <int>]. When TO is absent, only From
// is populated and To stays null.
affinityRange returns [ProcessAffinityRange vResult = FragmentFactory.CreateFragment()]
{
    Literal vBoundary;
}
    : vBoundary = integer
        {
            vResult.From = vBoundary;
        }
        (To vBoundary = integer
            {
                vResult.To = vBoundary;
            }
        )?
    ;

//////////////////////////////////////////////////////////////////////
// Alter Database
//////////////////////////////////////////////////////////////////////
// Dispatcher for every ALTER DATABASE ... statement form. Syntactic
// predicates (NextTokenMatches) peek ahead to route ambiguous prefixes
// before falling through to the generic alterDatabase alternative.
alterDatabaseStatements returns [TSqlStatement vResult = null]
    : tAlter:Alter Database
        (
            // Conflicts with alterDatabase alternative below
            {NextTokenMatches(CodeGenerationSupporter.Audit) && NextTokenMatches(CodeGenerationSupporter.Specification, 2)}?
            vResult = alterDatabaseAuditSpecification[tAlter]
            |
            {NextTokenMatches(CodeGenerationSupporter.Scoped) && NextTokenMatches(CodeGenerationSupporter.Credential, 2)}?
            vResult = alterDatabaseScopedCredentialStatement[tAlter]
            |
            {NextTokenMatches(CodeGenerationSupporter.Scoped) && NextTokenMatches(CodeGenerationSupporter.Configuration, 2)}?
+ vResult = alterDatabaseScopedConfigurationStatement[tAlter]
            |
            vResult = alterDatabase[tAlter]
            |
            vResult = alterDatabaseEncryptionKey[tAlter]
        )
    ;

// ALTER DATABASE SCOPED CONFIGURATION [FOR SECONDARY] { SET ... | CLEAR ... }
// SCOPED/CONFIGURATION/SECONDARY are non-reserved words, so each is parsed
// as a generic Identifier and validated with Match().
alterDatabaseScopedConfigurationStatement[IToken tAlter] returns [AlterDatabaseScopedConfigurationStatement vResult]
{
    bool vSecondary = false;
}
    :
        tScoped:Identifier tConfiguration:Identifier
        {
            Match(tScoped, CodeGenerationSupporter.Scoped);
            Match(tConfiguration, CodeGenerationSupporter.Configuration);
        }
        (For tSecondary:Identifier
            {
                // BUGFIX: validate the identifier after FOR. Only SECONDARY is
                // legal here; without this Match, any identifier (e.g.
                // "FOR FOO") was silently accepted as FOR SECONDARY.
                Match(tSecondary, CodeGenerationSupporter.Secondary);
                vSecondary = true;
            }
        )?
        (
            vResult = alterDatabaseScopedConfigSet[vSecondary]
            |
            vResult = alterDatabaseScopedConfigClear
        )
        {
            // Transfer the FOR SECONDARY flag onto whichever statement
            // variant was produced, then anchor token info at ALTER.
            if (vSecondary)
            {
                vResult.Secondary = true;
                UpdateTokenInfo(vResult,tSecondary);
            }
            UpdateTokenInfo(vResult,tAlter);
        }
    ;

// ALTER DATABASE SCOPED CONFIGURATION ... CLEAR <option>
alterDatabaseScopedConfigClear returns [AlterDatabaseScopedConfigurationClearStatement vResult = FragmentFactory.CreateFragment()]
{
    DatabaseConfigurationClearOption vOption;
}
    :
        tClear:Identifier
        {
            Match(tClear, CodeGenerationSupporter.Clear);
            UpdateTokenInfo(vResult, tClear);
        }
        vOption = databaseConfigurationClearOption
        {
            vResult.Option = vOption;
        }
    ;

// CLEAR option name, optionally followed by a binary literal which is
// stored as the plan handle (e.g. CLEAR PROCEDURE_CACHE 0x...).
databaseConfigurationClearOption returns [DatabaseConfigurationClearOption vResult = FragmentFactory.CreateFragment()]
{
    BinaryLiteral vLiteral;
}
    :
        tOption:Identifier
        {
            vResult.OptionKind = DatabaseConfigClearOptionKindHelper.Instance.ParseOption(tOption, SqlVersionFlags.TSqlFabricDW);
            UpdateTokenInfo(vResult, tOption);
        }
        (
            vLiteral = binary
            {
                vResult.PlanHandle = vLiteral;
            }
        )?
    ;

// ALTER DATABASE SCOPED CONFIGURATION ... SET <option>. Lookahead routes
// MAXDOP and the known ON/OFF/PRIMARY options; anything else is handled
// by the generic option rule.
alterDatabaseScopedConfigSet[bool forSecondary] returns [AlterDatabaseScopedConfigurationSetStatement vResult = FragmentFactory.CreateFragment()]
{
    DatabaseConfigurationSetOption vOption;
}
    :
        Set
        (
            {NextTokenMatches(CodeGenerationSupporter.MaxDop)}?
+ vOption = alterDatabaseScopedMaxDopOption[forSecondary]
            |
            {NextTokenMatches(CodeGenerationSupporter.QueryOptimizerHotFixes) || NextTokenMatches(CodeGenerationSupporter.ParameterSniffing) ||
                NextTokenMatches(CodeGenerationSupporter.LegacyCardinalityEstimation)}?
            vOption = alterDatabaseScopedOnOffPrimaryOption[forSecondary]
            |
            vOption = alterDatabaseScopedGenericOption[forSecondary]
        )
        {
            vResult.Option = vOption;
        }
    ;

// SET MAXDOP = { <int> | PRIMARY }. PRIMARY is only legal in the
// FOR SECONDARY form; otherwise error SQL46115 is raised.
alterDatabaseScopedMaxDopOption[bool forSecondary] returns [MaxDopConfigurationOption vResult = FragmentFactory.CreateFragment()]
{
    Literal vValue;
}
    :
        tMaxDop:Identifier EqualsSign
        {
            vResult.OptionKind = DatabaseConfigSetOptionKindHelper.Instance.ParseOption(tMaxDop, SqlVersionFlags.TSqlFabricDW);
            UpdateTokenInfo(vResult, tMaxDop);
        }
        (
            vValue = integer
            {
                vResult.Value = vValue;
            }
            |
            {NextTokenMatches(CodeGenerationSupporter.Primary)}?
            tPrimary:Primary
            {
                // MAXDOP = PRIMARY only makes sense when configuring the
                // secondary to mirror the primary's setting.
                if (!forSecondary)
                {
                    ThrowParseErrorException("SQL46115", vResult, TSqlParserResource.SQL46115Message);
                }
                vResult.Primary = true;
                UpdateTokenInfo(vResult, tPrimary);
            }
        )
    ;

// SET <option> = { ON | OFF | PRIMARY } for the known ON/OFF/PRIMARY
// options (QUERY_OPTIMIZER_HOTFIXES, PARAMETER_SNIFFING,
// LEGACY_CARDINALITY_ESTIMATION). PRIMARY requires FOR SECONDARY.
alterDatabaseScopedOnOffPrimaryOption[bool forSecondary] returns [OnOffPrimaryConfigurationOption vResult = FragmentFactory.CreateFragment()]
{
    DatabaseConfigurationOptionState vOptionState;
}
    :
        tOption:Identifier
        {
            vResult.OptionKind = DatabaseConfigSetOptionKindHelper.Instance.ParseOption(tOption, SqlVersionFlags.TSqlFabricDW);
            UpdateTokenInfo(vResult, tOption);
        }
        EqualsSign
        (
            vOptionState = databaseConfigurationOptionOnOffPrimary[vResult]
            {
                if(!forSecondary && vOptionState == DatabaseConfigurationOptionState.Primary)
                {
                    ThrowParseErrorException("SQL46115", vResult, TSqlParserResource.SQL46115Message);
                }
            }
        )
        {
            vResult.OptionState = vOptionState;
        }
    ;

// Catch-all for options not special-cased above: the option name is kept
// as a raw Identifier and the value as an identifier-or-expression.
alterDatabaseScopedGenericOption[bool forSecondary] returns [GenericConfigurationOption vResult = FragmentFactory.CreateFragment()]
{
    Identifier vOptionName;
Identifier vValueOnOff; + IdentifierOrScalarExpression vValue; + IToken token = LT(1); +} + : + vOptionName = identifier + { + vResult.GenericOptionKind = vOptionName; + } + EqualsSign + ( + {NextTokenMatches(CodeGenerationSupporter.Primary)}? + tPrimary:Primary + { + vValue = CreateIdentifierOrScalarExpressionFromIdentifier(CreateIdentifierFromToken(tPrimary)); + UpdateTokenInfo(vValue, token); + if (!forSecondary) + { + ThrowParseErrorException("SQL46115", vValue, TSqlParserResource.SQL46115Message); + } + vResult.GenericOptionState = vValue; + } + | + vValue = stringOrSignedIntegerOrIdentifier + { + vResult.GenericOptionState = vValue; + } + | + vValueOnOff = onOff + { + vResult.GenericOptionState = CreateIdentifierOrScalarExpressionFromIdentifier(vValueOnOff);; + } + ) + ; + +alterDatabase [IToken tAlter] returns [AlterDatabaseStatement vResult = null] +{ + Identifier vIdentifier = null; + bool vUseCurrent = false; +} + : ( + vIdentifier=identifier + | + vIdentifier=sqlCommandIdentifier + | + tCurrent:Current + { + vUseCurrent=true; + } + ) + ( vResult = alterDbAdd + | {NextTokenMatches(CodeGenerationSupporter.Remove)}? + vResult = alterDbRemove + | {NextTokenMatches(CodeGenerationSupporter.Modify)}? 
+ vResult = alterDbModify
      | vResult = alterDbSet
      | vResult = alterDbCollate
      | vResult = alterDbRebuild // Undocumented - for PSS only
      )
      {
        // Attach the target (CURRENT flag or database name) and the ALTER
        // token to whichever sub-statement was produced.
        if(vUseCurrent)
        {
            vResult.UseCurrent = true;
            UpdateTokenInfo(vResult,tCurrent);
        }
        else
        {
            vResult.DatabaseName = vIdentifier;
        }
        UpdateTokenInfo(vResult,tAlter);
        ThrowPartialAstIfPhaseOne(vResult);
      }
    ;
    exception
    catch[PhaseOnePartialAstException exception]
    {
        // Phase-one parsing bails out early with a partial AST; still record
        // the ALTER token and database name on it before rethrowing.
        UpdateTokenInfo(exception.Statement,tAlter);
        (exception.Statement as AlterDatabaseStatement).DatabaseName = vIdentifier;
        throw;
    }

// ALTER DATABASE ... COLLATE <collation>
alterDbCollate returns [AlterDatabaseCollateStatement vResult = FragmentFactory.CreateFragment()]
    : collation[vResult]
    ;

// ALTER DATABASE ... REBUILD LOG [ON <file declaration>]
alterDbRebuild returns [AlterDatabaseRebuildLogStatement vResult = FragmentFactory.CreateFragment()]
{
    FileDeclaration vFileDeclaration;
}
    : tRebuild:Identifier tLog:Identifier
        {
            Match(tRebuild, CodeGenerationSupporter.Rebuild);
            Match(tLog, CodeGenerationSupporter.Log);
            UpdateTokenInfo(vResult,tLog);
            ThrowPartialAstIfPhaseOne(vResult);
        }
        (On vFileDeclaration = fileDecl[false]
            {
                vResult.FileDeclaration = vFileDeclaration;
            }
        )?
    ;

// ALTER DATABASE ... ADD { FILE | FILEGROUP } dispatcher
alterDbAdd returns [AlterDatabaseStatement vResult = null]
    : Add
        (
            vResult = alterDbAddFile
            |
            vResult = alterDbAddFilegroup
        )
    ;

// Add File / Add LOG File
alterDbAddFile returns [AlterDatabaseAddFileStatement vResult = FragmentFactory.CreateFragment()]
{
    Identifier vIdentifier;
}
    : (tLog:Identifier
        {
            Match(tLog,CodeGenerationSupporter.Log);
            vResult.IsLog = true;
        }
      )?
      File
      {
        ThrowPartialAstIfPhaseOne(vResult);
      }
      fileDeclBodyList[vResult, vResult.FileDeclarations]
      (vIdentifier = toFilegroup
        {
            vResult.FileGroup = vIdentifier;
        }
      )?
+ ;

// Add FILEGROUP
// ALTER DATABASE ... ADD FILEGROUP <name>
//   [CONTAINS { FILESTREAM | MEMORY_OPTIMIZED_DATA }]
alterDbAddFilegroup returns [AlterDatabaseAddFileGroupStatement vResult = FragmentFactory.CreateFragment()]
{
    Identifier vIdentifier;
}
    : tFilegroup:Identifier vIdentifier=identifier
        {
            Match(tFilegroup, CodeGenerationSupporter.Filegroup);
            vResult.FileGroup = vIdentifier;
        }
        (Contains tFileStreamOrMemoryOptimizedData:Identifier
            {
                if (TryMatch(tFileStreamOrMemoryOptimizedData, CodeGenerationSupporter.FileStream))
                {
                    // FIX: removed stray double semicolon in this action.
                    vResult.ContainsFileStream = true;
                }
                else
                {
                    Match(tFileStreamOrMemoryOptimizedData, CodeGenerationSupporter.MemoryOptimizedData);
                    vResult.ContainsMemoryOptimizedData = true;
                }
                UpdateTokenInfo(vResult, tFileStreamOrMemoryOptimizedData);
            }
        )?
    ;

// ALTER DATABASE ... REMOVE { FILE <name> | FILEGROUP <name> }
alterDbRemove returns [AlterDatabaseStatement vResult = null]
{
    Identifier vIdentifier;
}
    : tRemove:Identifier
        {
            Match(tRemove,CodeGenerationSupporter.Remove);
        }
        (File vIdentifier = identifier
            {
                AlterDatabaseRemoveFileStatement removeFile = FragmentFactory.CreateFragment();
                removeFile.File = vIdentifier;
                vResult = removeFile;
            }
        |
            tFileGroup:Identifier vIdentifier = identifier
            {
                // REMOVE FILEGROUP
                Match(tFileGroup,CodeGenerationSupporter.Filegroup);
                AlterDatabaseRemoveFileGroupStatement vRemoveFilegroup = FragmentFactory.CreateFragment();
                vRemoveFilegroup.FileGroup = vIdentifier;
                vResult = vRemoveFilegroup;
            }
        )
    ;

// ALTER DATABASE ... MODIFY { NAME = | FILEGROUP | FILE | <azure option> }.
// NAME is routed by lookahead because it conflicts with the other
// identifier-led alternatives.
alterDbModify returns [AlterDatabaseStatement vResult = null]
{
    Identifier vIdentifier;
}
    : tModify:Identifier
        {
            Match(tModify,CodeGenerationSupporter.Modify);
        }
        (
            {NextTokenMatches(CodeGenerationSupporter.Name)}?
+ (tName:Identifier EqualsSign vIdentifier = identifier
            {
                // MODIFY NAME =
                Match(tName,CodeGenerationSupporter.Name);
                AlterDatabaseModifyNameStatement modifyDbName = FragmentFactory.CreateFragment();
                modifyDbName.NewDatabaseName = vIdentifier;
                vResult = modifyDbName;
            }
            )
            |
            (tFileGroup2:Identifier
                {
                    Match(tFileGroup2,CodeGenerationSupporter.Filegroup);
                }
                vResult = alterDbModifyFilegroup
            )
            | vResult = alterDbModifyFile
            | vResult = alterDbModifyAzureOptions
        )
    ;

// ALTER DATABASE ... MODIFY (<azure options>), wrapped as a SET statement.
alterDbModifyAzureOptions returns [AlterDatabaseSetStatement vResult = FragmentFactory.CreateFragment()]
    :
        azureOptions[vResult, vResult.Options]
    ;

// MODIFY File syntax
alterDbModifyFile returns [AlterDatabaseModifyFileStatement vResult = FragmentFactory.CreateFragment()]
{
    FileDeclaration vFileDecl;
}
    : File
        {
            ThrowPartialAstIfPhaseOne(vResult);
        }
        vFileDecl = fileDecl[true]
        {
            vResult.FileDeclaration = vFileDecl;
        }
    ;

// MODIFY FILEGROUP <name> { NAME = <new name> | DEFAULT | <updatability> }
alterDbModifyFilegroup returns [AlterDatabaseModifyFileGroupStatement vResult = FragmentFactory.CreateFragment()]
{
    Identifier vIdentifier, vIdentifier2;
    AlterDatabaseTermination vTermination;
}
    : vIdentifier = identifier
        {
            vResult.FileGroup = vIdentifier;
        }
        (
            (tName2:Identifier EqualsSign vIdentifier2 = identifier
                {
                    // MODIFY FILEGROUP NAME =
                    Match(tName2,CodeGenerationSupporter.Name);
                    vResult.NewFileGroupName = vIdentifier2;
                    ThrowPartialAstIfPhaseOne(vResult);
                }
            )
            | tDefault:Default
            {
                // MODIFY FILEGROUP Default
                vResult.MakeDefault = true;
                UpdateTokenInfo(vResult,tDefault);
            }
            |
            (tUpdatabilityOption:Identifier
                {
                    // MODIFY FILEGROUP