# sqlglot.dialects.tsql — SQL Server (T-SQL) dialect definition for sqlglot.
from __future__ import annotations

import datetime
import re
import typing as t
from functools import reduce

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    NormalizationStrategy,
    any_value_to_max_sql,
    build_date_delta,
    date_delta_sql,
    datestrtodate_sql,
    generatedasidentitycolumnconstraint_sql,
    max_or_greatest,
    min_or_least,
    rename_func,
    strposition_sql,
    timestrtotime_sql,
    trim_sql,
)
from sqlglot.helper import seq_get
from sqlglot.parser import build_coalesce
from sqlglot.time import format_time
from sqlglot.tokens import TokenType

if t.TYPE_CHECKING:
    from sqlglot._typing import E

# Date-part names/abbreviations that map to *full* textual strftime formats
# (full weekday/month names), used by DATENAME as opposed to DATEPART.
FULL_FORMAT_TIME_MAPPING = {
    "weekday": "%A",
    "dw": "%A",
    "w": "%A",
    "month": "%B",
    "mm": "%B",
    "m": "%B",
}

# Canonicalizes T-SQL's many datepart abbreviations (yyyy, qq, wk, dd, ...)
# to a single unit name for DATEADD/DATEDIFF transpilation.
DATE_DELTA_INTERVAL = {
    "year": "year",
    "yyyy": "year",
    "yy": "year",
    "quarter": "quarter",
    "qq": "quarter",
    "q": "quarter",
    "month": "month",
    "mm": "month",
    "m": "month",
    "week": "week",
    "ww": "week",
    "wk": "week",
    "day": "day",
    "dd": "day",
    "d": "day",
}


# Matches date/time tokens (day, month, year, hour, second) inside a FORMAT()
# format string; used to decide whether a format is date-like or numeric.
DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

# N = Numeric, C=Currency
TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}

# T-SQL's epoch for integer DATETIME values: day 0 is 1900-01-01.
DEFAULT_START_DATE = datetime.date(1900, 1, 1)

BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}

# Supported OPTION(...) query hints, mapping hint keyword -> allowed follower
# keywords (empty tuple means the keyword stands alone).
# Unsupported options:
# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
# - TABLE HINT
OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "DISABLE_OPTIMIZED_PLAN_FORCING",
            "FAST",
            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
            "LABEL",
            "MAXDOP",
            "MAXRECURSION",
            "MAX_GRANT_PERCENT",
            "MIN_GRANT_PERCENT",
            "NO_PERFORMANCE_SPOOL",
            "QUERYTRACEON",
            "RECOMPILE",
        ),
        tuple(),
    ),
    "CONCAT": ("UNION",),
    "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
    "EXPAND": ("VIEWS",),
    "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
    "HASH": ("GROUP", "JOIN", "UNION"),
    "KEEP": ("PLAN",),
    "KEEPFIXED": ("PLAN",),
    "LOOP": ("JOIN",),
    "MERGE": ("JOIN", "UNION"),
    "OPTIMIZE": (("FOR", "UNKNOWN"),),
    "ORDER": ("GROUP",),
    "PARAMETERIZATION": ("FORCED", "SIMPLE"),
    "ROBUST": ("PLAN",),
    "USE": ("PLAN",),
}


# Options accepted after FOR XML (e.g. FOR XML AUTO, ELEMENTS XSINIL).
XML_OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "AUTO",
            "EXPLICIT",
            "TYPE",
        ),
        tuple(),
    ),
    "ELEMENTS": (
        "XSINIL",
        "ABSENT",
    ),
    "BINARY": ("BASE64",),
}


# These OPTION(...) hints take the form NAME = <value>.
OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")


def _build_formatted_time(
    exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for DATENAME/DATEPART-style functions.

    The builder converts the T-SQL datepart (first arg) into a strftime-style
    format literal and casts the date operand (second arg) to DATETIME2.
    When ``full_format_mapping`` is set, full-name mappings (e.g. weekday ->
    %A) take precedence, matching DATENAME semantics.
    """

    def _builder(args: t.List) -> E:
        fmt = seq_get(args, 0)
        if isinstance(fmt, exp.Expression):
            fmt = exp.Literal.string(
                format_time(
                    fmt.name.lower(),
                    (
                        {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
                        if full_format_mapping
                        else TSQL.TIME_MAPPING
                    ),
                )
            )

        this = seq_get(args, 1)
        if isinstance(this, exp.Expression):
            this = exp.cast(this, exp.DataType.Type.DATETIME2)

        return exp_class(this=this, format=fmt)

    return _builder


def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
    """Build FORMAT(value, format[, culture]) as either a numeric or a
    date/time formatting expression.

    A format is treated as numeric when it is a transpile-safe number format
    ("N"/"C") or contains no date/time tokens (per DATE_FMT_RE); otherwise it
    is translated through the dialect's time mappings.
    """
    this = seq_get(args, 0)
    fmt = seq_get(args, 1)
    culture = seq_get(args, 2)

    number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))

    if number_fmt:
        return exp.NumberToStr(this=this, format=fmt, culture=culture)

    if fmt:
        fmt = exp.Literal.string(
            # Single-character formats are .NET "standard" format specifiers
            format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
            if len(fmt.name) == 1
            else format_time(fmt.name, TSQL.TIME_MAPPING)
        )

    return exp.TimeToStr(this=this, format=fmt, culture=culture)
def _build_eomonth(args: t.List) -> exp.LastDay:
    """Build EOMONTH(date[, month_lag]) as LAST_DAY, adding the optional
    month offset via DATEADD first."""
    date = exp.TsOrDsToDate(this=seq_get(args, 0))
    month_lag = seq_get(args, 1)

    if month_lag is None:
        this: exp.Expression = date
    else:
        unit = DATE_DELTA_INTERVAL.get("month")
        this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=this)


def _build_hashbytes(args: t.List) -> exp.Expression:
    """Map HASHBYTES('<algo>', data) onto the dedicated hash expressions
    (MD5/SHA/SHA2); unknown algorithms fall back to a raw HASHBYTES call."""
    kind, data = args
    kind = kind.name.upper() if kind.is_string else ""

    if kind == "MD5":
        args.pop(0)
        return exp.MD5(this=data)
    if kind in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)
    if kind == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))
    if kind == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    return exp.func("HASHBYTES", *args)


# Formats that only DATEPART (not FORMAT) can render when generating T-SQL.
DATEPART_ONLY_FORMATS = {"DW", "WK", "HOUR", "QUARTER"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    """Generate T-SQL for NumberToStr/TimeToStr, emitting FORMAT(...) or,
    for formats FORMAT cannot express, DATEPART(...)."""
    fmt = expression.args["format"]

    if not isinstance(expression, exp.NumberToStr):
        if fmt.is_string:
            mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

            name = (mapped_fmt or "").upper()
            if name in DATEPART_ONLY_FORMATS:
                return self.func("DATEPART", name, expression.this)

            fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
        else:
            fmt_sql = self.format_time(expression) or self.sql(fmt)
    else:
        fmt_sql = self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    """Generate STRING_AGG for GROUP_CONCAT, moving any ORDER BY into a
    WITHIN GROUP clause; DISTINCT is unsupported and is stripped with a
    warning."""
    this = expression.this
    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        this = distinct.pop().expressions[0]

    order = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            this = expression.this.this.pop()
        # Order has a leading space
        order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(this, separator)}){order}"


def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for DATEDIFF-style functions: normalizes the unit via
    ``unit_mapping`` and converts T-SQL integer date literals (days since
    1900-01-01) into date strings."""

    def _builder(args: t.List) -> E:
        unit = seq_get(args, 0)
        if unit and unit_mapping:
            unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))

        start_date = seq_get(args, 1)
        if start_date and start_date.is_number:
            # Numeric types are valid DATETIME values
            if start_date.is_int:
                adds = DEFAULT_START_DATE + datetime.timedelta(days=start_date.to_py())
                start_date = exp.Literal.string(adds.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
                # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
                return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start_date),
            unit=unit,
        )

    return _builder


def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression


# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    """Build DATETIMEFROMPARTS from its positional arguments."""
    return exp.TimestampFromParts(
        year=seq_get(args, 0),
        month=seq_get(args, 1),
        day=seq_get(args, 2),
        hour=seq_get(args, 3),
        min=seq_get(args, 4),
        sec=seq_get(args, 5),
        milli=seq_get(args, 6),
    )
# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
    """Build TIMEFROMPARTS from its positional arguments."""
    return exp.TimeFromParts(
        hour=seq_get(args, 0),
        min=seq_get(args, 1),
        sec=seq_get(args, 2),
        fractions=seq_get(args, 3),
        precision=seq_get(args, 4),
    )


def _build_with_arg_as_text(
    klass: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    """Return a builder that casts a non-string first argument to TEXT,
    matching T-SQL functions (LEN, LEFT, RIGHT) that coerce to text."""

    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        this = seq_get(args, 0)

        if this and not this.is_string:
            this = exp.cast(this, exp.DataType.Type.TEXT)

        expression = seq_get(args, 1)
        kwargs = {"this": this}

        if expression:
            kwargs["expression"] = expression

        return klass(**kwargs)

    return _parse


# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16
def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous:
    # PARSENAME(...) will be stored into exp.SplitPart if:
    # - All args are literals
    # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL)
    if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args):
        this = args[0]
        part_index = args[1]
        split_count = len(this.name.split("."))
        if split_count <= 4:
            # PARSENAME counts parts right-to-left, SPLIT_PART left-to-right,
            # hence the index inversion.
            return exp.SplitPart(
                this=this,
                delimiter=exp.Literal.string("."),
                part_index=exp.Literal.number(split_count + 1 - part_index.to_py()),
            )

    return exp.Anonymous(this="PARSENAME", expressions=args)


def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract:
    """Build JSON_QUERY, defaulting the JSON path argument to '$'."""
    if len(args) == 1:
        # The default value for path is '$'. As a result, if you don't provide a
        # value for path, JSON_QUERY returns the input expression.
        args.append(exp.Literal.string("$"))

    return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect)


def _json_extract_sql(
    self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar
) -> str:
    """Generate JSON extraction as ISNULL(JSON_QUERY(...), JSON_VALUE(...)),
    since JSON_QUERY handles objects/arrays and JSON_VALUE handles scalars."""
    json_query = self.func("JSON_QUERY", expression.this, expression.expression)
    json_value = self.func("JSON_VALUE", expression.this, expression.expression)
    return self.func("ISNULL", json_query, json_value)


def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime):
    sql = timestrtotime_sql(self, expression)
    if expression.args.get("zone"):
        # If there is a timezone, produce an expression like:
        # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC'
        # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information
        return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC")))
    return sql


def _build_datetrunc(args: t.List) -> exp.TimestampTrunc:
    """Build DATETRUNC(unit, value), casting string values to DATETIME2."""
    unit = seq_get(args, 0)
    this = seq_get(args, 1)

    if this and this.is_string:
        this = exp.cast(this, exp.DataType.Type.DATETIME2)

    return exp.TimestampTrunc(this=this, unit=unit)


class TSQL(Dialect):
    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    # Maps T-SQL dateparts and .NET-style format tokens to strftime tokens.
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%w",
        "dw": "%w",
        "month": "%m",
        "mm":
"%M", 438 "m": "%-M", 439 "Y": "%Y", 440 "YYYY": "%Y", 441 "YY": "%y", 442 "MMMM": "%B", 443 "MMM": "%b", 444 "MM": "%m", 445 "M": "%-m", 446 "dddd": "%A", 447 "dd": "%d", 448 "d": "%-d", 449 "HH": "%H", 450 "H": "%-H", 451 "h": "%-I", 452 "ffffff": "%f", 453 "yyyy": "%Y", 454 "yy": "%y", 455 } 456 457 CONVERT_FORMAT_MAPPING = { 458 "0": "%b %d %Y %-I:%M%p", 459 "1": "%m/%d/%y", 460 "2": "%y.%m.%d", 461 "3": "%d/%m/%y", 462 "4": "%d.%m.%y", 463 "5": "%d-%m-%y", 464 "6": "%d %b %y", 465 "7": "%b %d, %y", 466 "8": "%H:%M:%S", 467 "9": "%b %d %Y %-I:%M:%S:%f%p", 468 "10": "mm-dd-yy", 469 "11": "yy/mm/dd", 470 "12": "yymmdd", 471 "13": "%d %b %Y %H:%M:ss:%f", 472 "14": "%H:%M:%S:%f", 473 "20": "%Y-%m-%d %H:%M:%S", 474 "21": "%Y-%m-%d %H:%M:%S.%f", 475 "22": "%m/%d/%y %-I:%M:%S %p", 476 "23": "%Y-%m-%d", 477 "24": "%H:%M:%S", 478 "25": "%Y-%m-%d %H:%M:%S.%f", 479 "100": "%b %d %Y %-I:%M%p", 480 "101": "%m/%d/%Y", 481 "102": "%Y.%m.%d", 482 "103": "%d/%m/%Y", 483 "104": "%d.%m.%Y", 484 "105": "%d-%m-%Y", 485 "106": "%d %b %Y", 486 "107": "%b %d, %Y", 487 "108": "%H:%M:%S", 488 "109": "%b %d %Y %-I:%M:%S:%f%p", 489 "110": "%m-%d-%Y", 490 "111": "%Y/%m/%d", 491 "112": "%Y%m%d", 492 "113": "%d %b %Y %H:%M:%S:%f", 493 "114": "%H:%M:%S:%f", 494 "120": "%Y-%m-%d %H:%M:%S", 495 "121": "%Y-%m-%d %H:%M:%S.%f", 496 "126": "%Y-%m-%dT%H:%M:%S.%f", 497 } 498 499 FORMAT_TIME_MAPPING = { 500 "y": "%B %Y", 501 "d": "%m/%d/%Y", 502 "H": "%-H", 503 "h": "%-I", 504 "s": "%Y-%m-%d %H:%M:%S", 505 "D": "%A,%B,%Y", 506 "f": "%A,%B,%Y %-I:%M %p", 507 "F": "%A,%B,%Y %-I:%M:%S %p", 508 "g": "%m/%d/%Y %-I:%M %p", 509 "G": "%m/%d/%Y %-I:%M:%S %p", 510 "M": "%B %-d", 511 "m": "%B %-d", 512 "O": "%Y-%m-%dT%H:%M:%S", 513 "u": "%Y-%M-%D %H:%M:%S%z", 514 "U": "%A, %B %D, %Y %H:%M:%S%z", 515 "T": "%-I:%M:%S %p", 516 "t": "%-I:%M", 517 "Y": "%a %Y", 518 } 519 520 class Tokenizer(tokens.Tokenizer): 521 IDENTIFIERS = [("[", "]"), '"'] 522 QUOTES = ["'", '"'] 523 HEX_STRINGS = [("0x", ""), ("0X", "")] 524 
VAR_SINGLE_TOKENS = {"@", "$", "#"} 525 526 KEYWORDS = { 527 **tokens.Tokenizer.KEYWORDS, 528 "CLUSTERED INDEX": TokenType.INDEX, 529 "DATETIME2": TokenType.DATETIME2, 530 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 531 "DECLARE": TokenType.DECLARE, 532 "EXEC": TokenType.COMMAND, 533 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 534 "GO": TokenType.COMMAND, 535 "IMAGE": TokenType.IMAGE, 536 "MONEY": TokenType.MONEY, 537 "NONCLUSTERED INDEX": TokenType.INDEX, 538 "NTEXT": TokenType.TEXT, 539 "OPTION": TokenType.OPTION, 540 "OUTPUT": TokenType.RETURNING, 541 "PRINT": TokenType.COMMAND, 542 "PROC": TokenType.PROCEDURE, 543 "REAL": TokenType.FLOAT, 544 "ROWVERSION": TokenType.ROWVERSION, 545 "SMALLDATETIME": TokenType.SMALLDATETIME, 546 "SMALLMONEY": TokenType.SMALLMONEY, 547 "SQL_VARIANT": TokenType.VARIANT, 548 "SYSTEM_USER": TokenType.CURRENT_USER, 549 "TOP": TokenType.TOP, 550 "TIMESTAMP": TokenType.ROWVERSION, 551 "TINYINT": TokenType.UTINYINT, 552 "UNIQUEIDENTIFIER": TokenType.UUID, 553 "UPDATE STATISTICS": TokenType.COMMAND, 554 "XML": TokenType.XML, 555 } 556 KEYWORDS.pop("/*+") 557 558 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 559 560 class Parser(parser.Parser): 561 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 562 LOG_DEFAULTS_TO_LN = True 563 STRING_ALIASES = True 564 NO_PAREN_IF_COMMANDS = False 565 566 QUERY_MODIFIER_PARSERS = { 567 **parser.Parser.QUERY_MODIFIER_PARSERS, 568 TokenType.OPTION: lambda self: ("options", self._parse_options()), 569 TokenType.FOR: lambda self: ("for", self._parse_for()), 570 } 571 572 # T-SQL does not allow BEGIN to be used as an identifier 573 ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN} 574 ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN} 575 TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 576 COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN} 577 UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - 
{TokenType.BEGIN} 578 579 FUNCTIONS = { 580 **parser.Parser.FUNCTIONS, 581 "CHARINDEX": lambda args: exp.StrPosition( 582 this=seq_get(args, 1), 583 substr=seq_get(args, 0), 584 position=seq_get(args, 2), 585 ), 586 "COUNT": lambda args: exp.Count( 587 this=seq_get(args, 0), expressions=args[1:], big_int=False 588 ), 589 "COUNT_BIG": lambda args: exp.Count( 590 this=seq_get(args, 0), expressions=args[1:], big_int=True 591 ), 592 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 593 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 594 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 595 "DATEPART": _build_formatted_time(exp.TimeToStr), 596 "DATETIMEFROMPARTS": _build_datetimefromparts, 597 "EOMONTH": _build_eomonth, 598 "FORMAT": _build_format, 599 "GETDATE": exp.CurrentTimestamp.from_arg_list, 600 "HASHBYTES": _build_hashbytes, 601 "ISNULL": lambda args: build_coalesce(args=args, is_null=True), 602 "JSON_QUERY": _build_json_query, 603 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 604 "LEN": _build_with_arg_as_text(exp.Length), 605 "LEFT": _build_with_arg_as_text(exp.Left), 606 "NEWID": exp.Uuid.from_arg_list, 607 "RIGHT": _build_with_arg_as_text(exp.Right), 608 "PARSENAME": _build_parsename, 609 "REPLICATE": exp.Repeat.from_arg_list, 610 "SCHEMA_NAME": exp.CurrentSchema.from_arg_list, 611 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 612 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 613 "SUSER_NAME": exp.CurrentUser.from_arg_list, 614 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 615 "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list, 616 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 617 "TIMEFROMPARTS": _build_timefromparts, 618 "DATETRUNC": _build_datetrunc, 619 } 620 621 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 622 623 PROCEDURE_OPTIONS = dict.fromkeys( 624 ("ENCRYPTION", "RECOMPILE", 
"SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple() 625 ) 626 627 COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"} 628 629 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 630 TokenType.TABLE, 631 *parser.Parser.TYPE_TOKENS, 632 } 633 634 STATEMENT_PARSERS = { 635 **parser.Parser.STATEMENT_PARSERS, 636 TokenType.DECLARE: lambda self: self._parse_declare(), 637 } 638 639 RANGE_PARSERS = { 640 **parser.Parser.RANGE_PARSERS, 641 TokenType.DCOLON: lambda self, this: self.expression( 642 exp.ScopeResolution, 643 this=this, 644 expression=self._parse_function() or self._parse_var(any_token=True), 645 ), 646 } 647 648 NO_PAREN_FUNCTION_PARSERS = { 649 **parser.Parser.NO_PAREN_FUNCTION_PARSERS, 650 "NEXT": lambda self: self._parse_next_value_for(), 651 } 652 653 FUNCTION_PARSERS: t.Dict[str, t.Callable] = { 654 **parser.Parser.FUNCTION_PARSERS, 655 "JSON_ARRAYAGG": lambda self: self.expression( 656 exp.JSONArrayAgg, 657 this=self._parse_bitwise(), 658 order=self._parse_order(), 659 null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"), 660 ), 661 } 662 663 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 664 COLUMN_OPERATORS = { 665 **parser.Parser.COLUMN_OPERATORS, 666 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 667 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 668 else self.expression(exp.ScopeResolution, this=this, expression=to), 669 } 670 671 SET_OP_MODIFIERS = {"offset"} 672 673 def _parse_alter_table_set(self) -> exp.AlterSet: 674 return self._parse_wrapped(super()._parse_alter_table_set) 675 676 def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: 677 if self._match(TokenType.MERGE): 678 comments = self._prev_comments 679 merge = self._parse_merge() 680 merge.add_comments(comments, prepend=True) 681 return merge 682 683 return super()._parse_wrapped_select(table=table) 684 685 def 
_parse_dcolon(self) -> t.Optional[exp.Expression]: 686 # We want to use _parse_types() if the first token after :: is a known type, 687 # otherwise we could parse something like x::varchar(max) into a function 688 if self._match_set(self.TYPE_TOKENS, advance=False): 689 return self._parse_types() 690 691 return self._parse_function() or self._parse_types() 692 693 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 694 if not self._match(TokenType.OPTION): 695 return None 696 697 def _parse_option() -> t.Optional[exp.Expression]: 698 option = self._parse_var_from_options(OPTIONS) 699 if not option: 700 return None 701 702 self._match(TokenType.EQ) 703 return self.expression( 704 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 705 ) 706 707 return self._parse_wrapped_csv(_parse_option) 708 709 def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption: 710 this = self._parse_primary_or_var() 711 if self._match(TokenType.L_PAREN, advance=False): 712 expression = self._parse_wrapped(self._parse_string) 713 else: 714 expression = None 715 716 return exp.XMLKeyValueOption(this=this, expression=expression) 717 718 def _parse_for(self) -> t.Optional[t.List[exp.Expression]]: 719 if not self._match_pair(TokenType.FOR, TokenType.XML): 720 return None 721 722 def _parse_for_xml() -> t.Optional[exp.Expression]: 723 return self.expression( 724 exp.QueryOption, 725 this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False) 726 or self._parse_xml_key_value_option(), 727 ) 728 729 return self._parse_csv(_parse_for_xml) 730 731 def _parse_projections(self) -> t.List[exp.Expression]: 732 """ 733 T-SQL supports the syntax alias = expression in the SELECT's projection list, 734 so we transform all parsed Selects to convert their EQ projections into Aliases. 
735 736 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 737 """ 738 return [ 739 ( 740 exp.alias_(projection.expression, projection.this.this, copy=False) 741 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 742 else projection 743 ) 744 for projection in super()._parse_projections() 745 ] 746 747 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 748 """Applies to SQL Server and Azure SQL Database 749 COMMIT [ { TRAN | TRANSACTION } 750 [ transaction_name | @tran_name_variable ] ] 751 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 752 753 ROLLBACK { TRAN | TRANSACTION } 754 [ transaction_name | @tran_name_variable 755 | savepoint_name | @savepoint_variable ] 756 """ 757 rollback = self._prev.token_type == TokenType.ROLLBACK 758 759 self._match_texts(("TRAN", "TRANSACTION")) 760 this = self._parse_id_var() 761 762 if rollback: 763 return self.expression(exp.Rollback, this=this) 764 765 durability = None 766 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 767 self._match_text_seq("DELAYED_DURABILITY") 768 self._match(TokenType.EQ) 769 770 if self._match_text_seq("OFF"): 771 durability = False 772 else: 773 self._match(TokenType.ON) 774 durability = True 775 776 self._match_r_paren() 777 778 return self.expression(exp.Commit, this=this, durability=durability) 779 780 def _parse_transaction(self) -> exp.Transaction | exp.Command: 781 """Applies to SQL Server and Azure SQL Database 782 BEGIN { TRAN | TRANSACTION } 783 [ { transaction_name | @tran_name_variable } 784 [ WITH MARK [ 'description' ] ] 785 ] 786 """ 787 if self._match_texts(("TRAN", "TRANSACTION")): 788 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 789 if self._match_text_seq("WITH", "MARK"): 790 transaction.set("mark", self._parse_string()) 791 792 return transaction 793 794 return self._parse_as_command(self._prev) 795 796 def _parse_returns(self) -> 
exp.ReturnsProperty: 797 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 798 returns = super()._parse_returns() 799 returns.set("table", table) 800 return returns 801 802 def _parse_convert( 803 self, strict: bool, safe: t.Optional[bool] = None 804 ) -> t.Optional[exp.Expression]: 805 this = self._parse_types() 806 self._match(TokenType.COMMA) 807 args = [this, *self._parse_csv(self._parse_assignment)] 808 convert = exp.Convert.from_arg_list(args) 809 convert.set("safe", safe) 810 convert.set("strict", strict) 811 return convert 812 813 def _parse_column_def( 814 self, this: t.Optional[exp.Expression], computed_column: bool = True 815 ) -> t.Optional[exp.Expression]: 816 this = super()._parse_column_def(this=this, computed_column=computed_column) 817 if not this: 818 return None 819 if self._match(TokenType.EQ): 820 this.set("default", self._parse_disjunction()) 821 if self._match_texts(self.COLUMN_DEFINITION_MODES): 822 this.set("output", self._prev.text) 823 return this 824 825 def _parse_user_defined_function( 826 self, kind: t.Optional[TokenType] = None 827 ) -> t.Optional[exp.Expression]: 828 this = super()._parse_user_defined_function(kind=kind) 829 830 if ( 831 kind == TokenType.FUNCTION 832 or isinstance(this, exp.UserDefinedFunction) 833 or self._match(TokenType.ALIAS, advance=False) 834 ): 835 return this 836 837 if not self._match(TokenType.WITH, advance=False): 838 expressions = self._parse_csv(self._parse_function_parameter) 839 else: 840 expressions = None 841 842 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 843 844 def _parse_into(self) -> t.Optional[exp.Into]: 845 into = super()._parse_into() 846 847 table = isinstance(into, exp.Into) and into.find(exp.Table) 848 if isinstance(table, exp.Table): 849 table_identifier = table.this 850 if table_identifier.args.get("temporary"): 851 # Promote the temporary property from the Identifier to the Into expression 852 t.cast(exp.Into, 
into).set("temporary", True) 853 854 return into 855 856 def _parse_id_var( 857 self, 858 any_token: bool = True, 859 tokens: t.Optional[t.Collection[TokenType]] = None, 860 ) -> t.Optional[exp.Expression]: 861 is_temporary = self._match(TokenType.HASH) 862 is_global = is_temporary and self._match(TokenType.HASH) 863 864 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 865 if this: 866 if is_global: 867 this.set("global", True) 868 elif is_temporary: 869 this.set("temporary", True) 870 871 return this 872 873 def _parse_create(self) -> exp.Create | exp.Command: 874 create = super()._parse_create() 875 876 if isinstance(create, exp.Create): 877 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 878 if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"): 879 if not create.args.get("properties"): 880 create.set("properties", exp.Properties(expressions=[])) 881 882 create.args["properties"].append("expressions", exp.TemporaryProperty()) 883 884 return create 885 886 def _parse_if(self) -> t.Optional[exp.Expression]: 887 index = self._index 888 889 if self._match_text_seq("OBJECT_ID"): 890 self._parse_wrapped_csv(self._parse_string) 891 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 892 return self._parse_drop(exists=True) 893 self._retreat(index) 894 895 return super()._parse_if() 896 897 def _parse_unique(self) -> exp.UniqueColumnConstraint: 898 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 899 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 900 else: 901 this = self._parse_schema(self._parse_id_var(any_token=False)) 902 903 return self.expression(exp.UniqueColumnConstraint, this=this) 904 905 def _parse_partition(self) -> t.Optional[exp.Partition]: 906 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 907 return None 908 909 def parse_range(): 910 low = self._parse_bitwise() 911 high = self._parse_bitwise() if self._match_text_seq("TO") 
                else None

                return (
                    self.expression(exp.PartitionRange, this=low, expression=high) if high else low
                )

            partition = self.expression(
                exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
            )

            self._match_r_paren()

            return partition

        def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
            """Parse one DECLARE item: <name> [AS] <type | TABLE (...)> [= <default>]."""
            var = self._parse_id_var()
            if not var:
                return None

            # The AS keyword between the variable and its type is optional
            self._match(TokenType.ALIAS)
            return self.expression(
                exp.DeclareItem,
                this=var,
                # DECLARE @t TABLE (...) carries a schema; otherwise parse a scalar type
                kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(),
                default=self._match(TokenType.EQ) and self._parse_bitwise(),
            )

        def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
            """Parse ALTER TABLE ... ALTER, normalizing a collation parsed as a column."""
            expression = super()._parse_alter_table_alter()

            if expression is not None:
                collation = expression.args.get("collate")
                # Collation names are bare identifiers; store them as plain Vars so they
                # don't round-trip as (potentially quoted) column references
                if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
                    identifier = collation.this
                    collation.set("this", exp.Var(this=identifier.name))

            return expression

    class Generator(generator.Generator):
        # Behavior switches consumed by the base generator
        LIMIT_IS_TOP = True
        QUERY_HINTS = False
        RETURNING_END = False
        NVL2_SUPPORTED = False
        ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
        LIMIT_FETCH = "FETCH"
        COMPUTED_COLUMN_WITH_TYPE = False
        CTE_RECURSIVE_KEYWORD_REQUIRED = False
        ENSURE_BOOLS = True
        NULL_ORDERING_SUPPORTED = None
        SUPPORTS_SINGLE_ARG_CONCAT = False
        TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
        SUPPORTS_SELECT_INTO = True
        JSON_PATH_BRACKETED_KEY_SUPPORTED = False
        SUPPORTS_TO_NUMBER = False
        SET_OP_MODIFIERS = False
        COPY_PARAMS_EQ_REQUIRED = True
        PARSE_JSON_NAME = None
        EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
        ALTER_SET_WRAPPED = True
        ALTER_SET_TYPE = ""

        # Expressions whose nested CTEs must be bubbled up before generation
        EXPRESSIONS_WITHOUT_NESTED_CTES = {
            exp.Create,
            exp.Delete,
            exp.Insert,
            exp.Intersect,
            exp.Except,
            exp.Merge,
            exp.Select,
            exp.Subquery,
            exp.Union,
            exp.Update,
        }

        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
            exp.JSONPathRoot,
            exp.JSONPathSubscript,
        }

        # sqlglot type -> T-SQL type name overrides
        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BOOLEAN: "BIT",
            exp.DataType.Type.DATETIME2: "DATETIME2",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DOUBLE: "FLOAT",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.ROWVERSION: "ROWVERSION",
            exp.DataType.Type.TEXT: "VARCHAR(MAX)",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
            exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
            exp.DataType.Type.UTINYINT: "TINYINT",
            exp.DataType.Type.VARIANT: "SQL_VARIANT",
            exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
        }

        # NCHAR/NVARCHAR are valid T-SQL types, so keep their default rendering
        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.Chr: rename_func("CHAR"),
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.DateDiff: date_delta_sql("DATEDIFF"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.Extract: rename_func("DATEPART"),
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Ln: rename_func("LOG"),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.Repeat: rename_func("REPLICATE"),
            exp.CurrentSchema: rename_func("SCHEMA_NAME"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                    transforms.unnest_generate_date_array_using_recursive_cte,
                ]
            ),
            exp.Stddev: rename_func("STDEV"),
            exp.StrPosition: lambda self, e: strposition_sql(
                self, e, func_name="CHARINDEX", supports_position=True
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
            exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
            exp.Uuid: lambda *_: "NEWID()",
            exp.DateFromParts: rename_func("DATEFROMPARTS"),
        }

        # RETURNS is rendered by returnsproperty_sql below instead
        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            """Render scope resolution using T-SQL's `scope::member` syntax."""
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            """Generate SELECT, rewriting LIMIT/FETCH into T-SQL's ORDER BY + OFFSET/FETCH."""
            limit = expression.args.get("limit")
            offset = expression.args.get("offset")

            if isinstance(limit, exp.Fetch) and not offset:
                # Dialects like Oracle can FETCH directly from a row set but
                # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
                offset = exp.Offset(expression=exp.Literal.number(0))
                expression.set("offset", offset)

            if offset:
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Render CONVERT / TRY_CONVERT with its optional style argument."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Render one OPTION(...) entry, adding '=' only where T-SQL requires it."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Map LATERAL joins to CROSS/OUTER APPLY; a bare LATERAL is unsupported."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART to PARSENAME (only valid for '.' delimiter, literal args)."""
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            # PARSENAME numbers parts from the right, so flip the requested index
            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Render TIMEFROMPARTS, filling in the fractions/precision args it requires."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Render DATETIMEFROMPARTS, dropping unsupported zone/nano arguments."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            """Render booleans as bit literals (1/0) in value position, predicates otherwise."""
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            # IS TRUE/FALSE has no T-SQL form; compare against the bit literal instead
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Prefix the created object's name with '#' when it is temporary."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE, handling CTAS -> SELECT INTO, IF NOT EXISTS emulation via
            EXEC, and CREATE OR REPLACE -> CREATE OR ALTER."""
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with", with_.pop())

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                properties = expression.args.get("properties") or exp.Properties()
                is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table, temporary=is_temp))

                if like_property:
                    # CREATE ... LIKE only needs the shape, not the rows
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)
            else:
                sql = super().create_sql(expression)

            if exists:
                # Emulate IF NOT EXISTS by probing the catalog views and EXEC'ing the DDL
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql_with_ctes = self.prepend_ctes(expression, sql)
                sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
                if kind == "SCHEMA":
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("TABLE_NAME").eq(table.name),
                        exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                        exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                    )
                    return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        @generator.unsupported_args("unlogged", "expressions")
        def into_sql(self, expression: exp.Into) -> str:
            if expression.args.get("temporary"):
                # If the Into expression has a temporary property, push this down to the Identifier
                table = expression.find(exp.Table)
                if table and isinstance(table.this, exp.Identifier):
                    table.this.set("temporary", True)

            return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

        def count_sql(self, expression: exp.Count) -> str:
            """Render COUNT or COUNT_BIG depending on the parsed big_int flag."""
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            # T-SQL's OFFSET needs a trailing ROWS keyword
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Render FOR SYSTEM_TIME temporal clauses (AS OF / FROM ... TO / BETWEEN ...)."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            """Render RETURNS [@table] <type> for function definitions."""
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            """Render RETURNING as T-SQL's OUTPUT [... INTO target] clause."""
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            """Render BEGIN TRANSACTION [name] [WITH MARK 'description']."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            """Render COMMIT TRANSACTION [name] [WITH (DELAYED_DURABILITY = ON|OFF)]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            """Render ROLLBACK TRANSACTION [name]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            """Re-attach the #/## prefixes of (global) temporary object names."""
            identifier = super().identifier_sql(expression)

            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            """Render a named constraint; T-SQL separates its parts with spaces, not commas."""
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            """Render `name`, unwrapping an argument that was cast to TEXT at parse time."""
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)

        def partition_sql(self, expression: exp.Partition) -> str:
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

        def alter_sql(self, expression: exp.Alter) -> str:
            """Render ALTER ... RENAME via the sp_rename stored procedure."""
            action = seq_get(expression.args.get("actions") or [], 0)
            if isinstance(action, exp.AlterRename):
                return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
            return super().alter_sql(expression)

        def drop_sql(self, expression: exp.Drop) -> str:
            # DROP VIEW cannot be catalog-qualified in T-SQL
            if expression.args["kind"] == "VIEW":
                expression.this.set("catalog", None)
            return super().drop_sql(expression)

        def options_modifier(self, expression: exp.Expression) -> str:
            options = self.expressions(expression, key="options")
            return f" OPTION{self.wrap(options)}" if options else ""

        def dpipe_sql(self, expression: exp.DPipe) -> str:
            # T-SQL concatenates with '+', so fold an `a || b || c` chain into nested Adds
            return self.sql(
                reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
            )

        def isascii_sql(self, expression: exp.IsAscii) -> str:
            # The hex literal decodes to the pattern '%[^\x00-\x7f]%', i.e. "contains a
            # non-ASCII character"; a binary collation makes the range match bytewise
            return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

        def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
            """Append `= default` and OUT/OUTPUT/READONLY modes (procedure parameters)."""
            this = super().columndef_sql(expression, sep)
            default = self.sql(expression, "default")
            default = f" = {default}" if default else ""
            output = self.sql(expression, "output")
            output = f" {output}" if output else ""
            return f"{this}{default}{output}"

        def coalesce_sql(self, expression: exp.Coalesce) -> str:
            """Round-trip ISNULL as ISNULL and COALESCE as COALESCE."""
            func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
            return rename_func(func_name)(self, expression)
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    # Only act on CTEs/subqueries that have a table alias without an explicit column list
    if (
        not isinstance(expression, (exp.CTE, exp.Subquery))
        or not isinstance(alias, exp.TableAlias)
        or alias.columns
    ):
        return expression

    from sqlglot.optimizer.qualify_columns import qualify_outputs

    query = expression.this

    # Record the positions (not the nodes) of projections that lack an alias: the
    # nodes themselves get swapped out for new Alias parents when the aliases are
    # added, so only their indexes remain a valid way to reach them afterwards.
    unaliased_positions = [
        idx
        for idx, projection in enumerate(query.selects)
        if isinstance(projection, exp.Column) and not projection.alias
    ]

    qualify_outputs(query)

    # Carry each original column's quoting over to its freshly created Alias node
    selections = query.selects
    for idx in unaliased_positions:
        new_alias = selections[idx]
        wrapped_column = new_alias.this
        if isinstance(wrapped_column.this, exp.Identifier):
            new_alias.args["alias"].set("quoted", wrapped_column.this.quoted)

    return expression
# Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
# Dialect definition for T-SQL (Microsoft SQL Server / Azure SQL Database).
class TSQL(Dialect):
    # Dialect-wide behavior flags consumed by the base Dialect machinery
    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    # T-SQL date-part / format tokens -> strftime-style directives
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%w",
        "dw": "%w",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "ffffff": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # CONVERT() numeric style codes -> strftime formats
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
        "126": "%Y-%m-%dT%H:%M:%S.%f",
    }

    # FORMAT() single-letter standard format strings -> strftime formats
    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }

    class Tokenizer(tokens.Tokenizer):
        # [bracketed] and "double-quoted" identifiers; 0x... hex literals; @/$/# in vars
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CLUSTERED INDEX": TokenType.INDEX,
            "DATETIME2": TokenType.DATETIME2,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "GO": TokenType.COMMAND,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NONCLUSTERED INDEX": TokenType.INDEX,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.SMALLDATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            "TIMESTAMP": TokenType.ROWVERSION,
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UUID,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }
        # T-SQL has no /*+ ... */ hint comments
        KEYWORDS.pop("/*+")

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}

    class Parser(parser.Parser):
        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        LOG_DEFAULTS_TO_LN = True
        STRING_ALIASES = True
        NO_PAREN_IF_COMMANDS = False

        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            TokenType.OPTION: lambda self: ("options", self._parse_options()),
            TokenType.FOR: lambda self: ("for", self._parse_for()),
        }

        # T-SQL does not allow BEGIN to be used as an identifier
        ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN}
        ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN}
        TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
        COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
        UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN}

        # Function-name -> builder overrides for T-SQL built-ins
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            "CHARINDEX": lambda args: exp.StrPosition(
                this=seq_get(args, 1),
                substr=seq_get(args, 0),
                position=seq_get(args, 2),
            ),
            "COUNT": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=False
            ),
            "COUNT_BIG": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=True
            ),
            "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
            "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
            "DATEPART": _build_formatted_time(exp.TimeToStr),
            "DATETIMEFROMPARTS": _build_datetimefromparts,
            "EOMONTH": _build_eomonth,
            "FORMAT": _build_format,
            "GETDATE": exp.CurrentTimestamp.from_arg_list,
            "HASHBYTES": _build_hashbytes,
            "ISNULL": lambda args: build_coalesce(args=args, is_null=True),
            "JSON_QUERY": _build_json_query,
            "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
            "LEN": _build_with_arg_as_text(exp.Length),
            "LEFT": _build_with_arg_as_text(exp.Left),
            "NEWID": exp.Uuid.from_arg_list,
            "RIGHT": _build_with_arg_as_text(exp.Right),
            "PARSENAME": _build_parsename,
            "REPLICATE": exp.Repeat.from_arg_list,
            "SCHEMA_NAME": exp.CurrentSchema.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
            "SUSER_NAME": exp.CurrentUser.from_arg_list,
            "SUSER_SNAME": exp.CurrentUser.from_arg_list,
            "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list,
            "SYSTEM_USER": exp.CurrentUser.from_arg_list,
            "TIMEFROMPARTS": _build_timefromparts,
            "DATETRUNC": _build_datetrunc,
        }

        JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

        PROCEDURE_OPTIONS = dict.fromkeys(
            ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
        )

        COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"}

        RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
            TokenType.TABLE,
            *parser.Parser.TYPE_TOKENS,
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.DECLARE: lambda self: self._parse_declare(),
        }

        RANGE_PARSERS = {
            **parser.Parser.RANGE_PARSERS,
            TokenType.DCOLON: lambda self, this: self.expression(
                exp.ScopeResolution,
                this=this,
                expression=self._parse_function() or self._parse_var(any_token=True),
            ),
        }

        NO_PAREN_FUNCTION_PARSERS = {
            **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
            "NEXT": lambda self: self._parse_next_value_for(),
        }

        FUNCTION_PARSERS: t.Dict[str, t.Callable] = {
            **parser.Parser.FUNCTION_PARSERS,
            "JSON_ARRAYAGG": lambda self: self.expression(
                exp.JSONArrayAgg,
                this=self._parse_bitwise(),
                order=self._parse_order(),
                null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"),
            ),
        }

        # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
        COLUMN_OPERATORS = {
            **parser.Parser.COLUMN_OPERATORS,
            TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
            if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
            else self.expression(exp.ScopeResolution, this=this, expression=to),
        }

        SET_OP_MODIFIERS = {"offset"}

        def _parse_alter_table_set(self) -> exp.AlterSet:
            # ALTER ... SET options are parenthesized in T-SQL
            return self._parse_wrapped(super()._parse_alter_table_set)

        def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
            """Allow a parenthesized MERGE statement where a wrapped SELECT is expected."""
            if self._match(TokenType.MERGE):
                comments = self._prev_comments
                merge = self._parse_merge()
                merge.add_comments(comments, prepend=True)
                return merge

            return super()._parse_wrapped_select(table=table)

        def _parse_dcolon(self) -> t.Optional[exp.Expression]:
            # We want to use _parse_types() if the first token after :: is a known type,
            # otherwise we could parse something like x::varchar(max) into a function
            if self._match_set(self.TYPE_TOKENS, advance=False):
                return self._parse_types()

            return self._parse_function() or self._parse_types()

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse the OPTION(<name> [= <value>], ...) query hint clause."""
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)

        def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
            """Parse a FOR XML sub-option, optionally with a parenthesized string argument."""
            this = self._parse_primary_or_var()
            if self._match(TokenType.L_PAREN, advance=False):
                expression = self._parse_wrapped(self._parse_string)
            else:
                expression = None

            return exp.XMLKeyValueOption(this=this, expression=expression)

        def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse the FOR XML query modifier and its comma-separated options."""
            if not self._match_pair(TokenType.FOR, TokenType.XML):
                return None

            def _parse_for_xml() -> t.Optional[exp.Expression]:
                return self.expression(
                    exp.QueryOption,
                    this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                    or self._parse_xml_key_value_option(),
                )

            return self._parse_csv(_parse_for_xml)

        def _parse_projections(self) -> t.List[exp.Expression]:
            """
            T-SQL supports the syntax alias = expression in the SELECT's projection list,
            so we transform all parsed Selects to convert their EQ projections into Aliases.

            See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
            """
            return [
                (
                    exp.alias_(projection.expression, projection.this.this, copy=False)
                    if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                    else projection
                )
                for projection in super()._parse_projections()
            ]

        def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
            """Applies to SQL Server and Azure SQL Database
            COMMIT [ { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable ] ]
                [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

            ROLLBACK { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable
                | savepoint_name | @savepoint_variable ]
            """
            rollback = self._prev.token_type == TokenType.ROLLBACK

            self._match_texts(("TRAN", "TRANSACTION"))
            this = self._parse_id_var()

            if rollback:
                return self.expression(exp.Rollback, this=this)

            durability = None
            if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
                self._match_text_seq("DELAYED_DURABILITY")
                self._match(TokenType.EQ)

                if self._match_text_seq("OFF"):
                    durability = False
                else:
                    self._match(TokenType.ON)
                    durability = True

                self._match_r_paren()

            return self.expression(exp.Commit, this=this, durability=durability)

        def _parse_transaction(self) -> exp.Transaction | exp.Command:
            """Applies to SQL Server and Azure SQL Database
            BEGIN { TRAN | TRANSACTION }
              [ { transaction_name | @tran_name_variable }
              [ WITH MARK [ 'description' ] ]
              ]
            """
            if self._match_texts(("TRAN", "TRANSACTION")):
                transaction = self.expression(exp.Transaction, this=self._parse_id_var())
                if self._match_text_seq("WITH", "MARK"):
                    transaction.set("mark", self._parse_string())

                return transaction

            return self._parse_as_command(self._prev)

        def _parse_returns(self) -> exp.ReturnsProperty:
            """Parse RETURNS [@table_var] <type>, attaching the optional table variable."""
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            """Parse CONVERT(<type>, <expr>[, <style>]) into an exp.Convert node."""
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_assignment)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            convert.set("strict", strict)
            return convert

        def _parse_column_def(
            self, this: t.Optional[exp.Expression], computed_column: bool = True
        ) -> t.Optional[exp.Expression]:
            """Extend column defs with `= default` and OUT/OUTPUT/READONLY parameter modes."""
            this = super()._parse_column_def(this=this, computed_column=computed_column)
            if not this:
                return None
            if self._match(TokenType.EQ):
                this.set("default", self._parse_disjunction())
            if self._match_texts(self.COLUMN_DEFINITION_MODES):
                this.set("output", self._prev.text)
            return this

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            """Parse a UDF/procedure header, allowing parameter lists without parentheses."""
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            if not self._match(TokenType.WITH, advance=False):
                expressions = self._parse_csv(self._parse_function_parameter)
            else:
                expressions = None

            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_into(self) -> t.Optional[exp.Into]:
            into = super()._parse_into()

            table = isinstance(into, exp.Into) and into.find(exp.Table)
            if isinstance(table, exp.Table):
                table_identifier = table.this
                if table_identifier.args.get("temporary"):
                    # Promote the temporary property from the Identifier to the Into expression
                    t.cast(exp.Into, into).set("temporary", True)

            return into

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            """Recognize leading # / ## markers for (global) temporary object names."""
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            """Mark CREATE of a #-prefixed table with a TemporaryProperty."""
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create

        def _parse_if(self) -> t.Optional[exp.Expression]:
            """Recognize `IF OBJECT_ID(...) IS NOT NULL DROP ...` as a DROP IF EXISTS."""
            index = self._index

            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    return self._parse_drop(exists=True)
            self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            """Parse UNIQUE, including the CLUSTERED/NONCLUSTERED variants."""
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_partition(self) -> t.Optional[exp.Partition]:
            """Parse WITH (PARTITIONS (...)), supporting `low TO high` ranges."""
            if not self._match_text_seq("WITH", "(", "PARTITIONS"):
                return None

            def parse_range():
                low = self._parse_bitwise()
                high = self._parse_bitwise() if self._match_text_seq("TO") else None

                return (
                    self.expression(exp.PartitionRange, this=low, expression=high) if high else low
                )

            partition = self.expression(
                exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
            )

            self._match_r_paren()

            return partition

        def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
            """Parse one DECLARE item: <name> [AS] <type | TABLE (...)> [= <default>]."""
            var = self._parse_id_var()
            if not var:
                return None

            self._match(TokenType.ALIAS)
            return self.expression(
                exp.DeclareItem,
                this=var,
                kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(),
                default=self._match(TokenType.EQ) and self._parse_bitwise(),
            )

        def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
            """Parse ALTER TABLE ... ALTER, normalizing a collation parsed as a column."""
            expression = super()._parse_alter_table_alter()

            if expression is not None:
                collation = expression.args.get("collate")
                if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
                    identifier = collation.this
                    collation.set("this", exp.Var(this=identifier.name))

            return expression

    class Generator(generator.Generator):
        # Behavior switches consumed by the base generator
        LIMIT_IS_TOP = True
        QUERY_HINTS = False
        RETURNING_END = False
        NVL2_SUPPORTED = False
        ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
        LIMIT_FETCH = "FETCH"
        COMPUTED_COLUMN_WITH_TYPE = False
        CTE_RECURSIVE_KEYWORD_REQUIRED = False
        ENSURE_BOOLS = True
        NULL_ORDERING_SUPPORTED = None
        SUPPORTS_SINGLE_ARG_CONCAT = False
        TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
        SUPPORTS_SELECT_INTO = True
        JSON_PATH_BRACKETED_KEY_SUPPORTED = False
        SUPPORTS_TO_NUMBER = False
        SET_OP_MODIFIERS = False
        COPY_PARAMS_EQ_REQUIRED = True
        PARSE_JSON_NAME = None
        EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
        ALTER_SET_WRAPPED = True
        ALTER_SET_TYPE = ""

        # Expressions whose nested CTEs must be bubbled up before generation
        EXPRESSIONS_WITHOUT_NESTED_CTES = {
            exp.Create,
            exp.Delete,
            exp.Insert,
            exp.Intersect,
            exp.Except,
            exp.Merge,
            exp.Select,
            exp.Subquery,
            exp.Union,
            exp.Update,
        }

        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
            exp.JSONPathRoot,
            exp.JSONPathSubscript,
        }

        # sqlglot type -> T-SQL type name overrides
        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BOOLEAN: "BIT",
            exp.DataType.Type.DATETIME2: "DATETIME2",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DOUBLE: "FLOAT",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.ROWVERSION: "ROWVERSION",
            exp.DataType.Type.TEXT: "VARCHAR(MAX)",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
            exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
            exp.DataType.Type.UTINYINT: "TINYINT",
            exp.DataType.Type.VARIANT: "SQL_VARIANT",
            exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
        }

        # NCHAR/NVARCHAR are valid T-SQL types, so keep their default rendering
        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.Chr: rename_func("CHAR"),
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.DateDiff: date_delta_sql("DATEDIFF"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
exp.CurrentTimestamp: rename_func("GETDATE"), 1024 exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"), 1025 exp.DateStrToDate: datestrtodate_sql, 1026 exp.Extract: rename_func("DATEPART"), 1027 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 1028 exp.GroupConcat: _string_agg_sql, 1029 exp.If: rename_func("IIF"), 1030 exp.JSONExtract: _json_extract_sql, 1031 exp.JSONExtractScalar: _json_extract_sql, 1032 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 1033 exp.Ln: rename_func("LOG"), 1034 exp.Max: max_or_greatest, 1035 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 1036 exp.Min: min_or_least, 1037 exp.NumberToStr: _format_sql, 1038 exp.Repeat: rename_func("REPLICATE"), 1039 exp.CurrentSchema: rename_func("SCHEMA_NAME"), 1040 exp.Select: transforms.preprocess( 1041 [ 1042 transforms.eliminate_distinct_on, 1043 transforms.eliminate_semi_and_anti_joins, 1044 transforms.eliminate_qualify, 1045 transforms.unnest_generate_date_array_using_recursive_cte, 1046 ] 1047 ), 1048 exp.Stddev: rename_func("STDEV"), 1049 exp.StrPosition: lambda self, e: strposition_sql( 1050 self, e, func_name="CHARINDEX", supports_position=True 1051 ), 1052 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 1053 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 1054 exp.SHA2: lambda self, e: self.func( 1055 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 1056 ), 1057 exp.TemporaryProperty: lambda self, e: "", 1058 exp.TimeStrToTime: _timestrtotime_sql, 1059 exp.TimeToStr: _format_sql, 1060 exp.Trim: trim_sql, 1061 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 1062 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 1063 exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this), 1064 exp.Uuid: lambda *_: "NEWID()", 1065 exp.DateFromParts: rename_func("DATEFROMPARTS"), 1066 } 1067 1068 
TRANSFORMS.pop(exp.ReturnsProperty) 1069 1070 PROPERTIES_LOCATION = { 1071 **generator.Generator.PROPERTIES_LOCATION, 1072 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 1073 } 1074 1075 def scope_resolution(self, rhs: str, scope_name: str) -> str: 1076 return f"{scope_name}::{rhs}" 1077 1078 def select_sql(self, expression: exp.Select) -> str: 1079 limit = expression.args.get("limit") 1080 offset = expression.args.get("offset") 1081 1082 if isinstance(limit, exp.Fetch) and not offset: 1083 # Dialects like Oracle can FETCH directly from a row set but 1084 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1085 offset = exp.Offset(expression=exp.Literal.number(0)) 1086 expression.set("offset", offset) 1087 1088 if offset: 1089 if not expression.args.get("order"): 1090 # ORDER BY is required in order to use OFFSET in a query, so we use 1091 # a noop order by, since we don't really care about the order. 1092 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1093 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1094 1095 if isinstance(limit, exp.Limit): 1096 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1097 # we replace here because otherwise TOP would be generated in select_sql 1098 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1099 1100 return super().select_sql(expression) 1101 1102 def convert_sql(self, expression: exp.Convert) -> str: 1103 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 1104 return self.func( 1105 name, expression.this, expression.expression, expression.args.get("style") 1106 ) 1107 1108 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1109 option = self.sql(expression, "this") 1110 value = self.sql(expression, "expression") 1111 if value: 1112 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1113 return f"{option} {optional_equal_sign}{value}" 1114 return option 1115 1116 def 
lateral_op(self, expression: exp.Lateral) -> str: 1117 cross_apply = expression.args.get("cross_apply") 1118 if cross_apply is True: 1119 return "CROSS APPLY" 1120 if cross_apply is False: 1121 return "OUTER APPLY" 1122 1123 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1124 self.unsupported("LATERAL clause is not supported.") 1125 return "LATERAL" 1126 1127 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1128 this = expression.this 1129 split_count = len(this.name.split(".")) 1130 delimiter = expression.args.get("delimiter") 1131 part_index = expression.args.get("part_index") 1132 1133 if ( 1134 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1135 or (delimiter and delimiter.name != ".") 1136 or not part_index 1137 or split_count > 4 1138 ): 1139 self.unsupported( 1140 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1141 ) 1142 return "" 1143 1144 return self.func( 1145 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1146 ) 1147 1148 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1149 nano = expression.args.get("nano") 1150 if nano is not None: 1151 nano.pop() 1152 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1153 1154 if expression.args.get("fractions") is None: 1155 expression.set("fractions", exp.Literal.number(0)) 1156 if expression.args.get("precision") is None: 1157 expression.set("precision", exp.Literal.number(0)) 1158 1159 return rename_func("TIMEFROMPARTS")(self, expression) 1160 1161 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1162 zone = expression.args.get("zone") 1163 if zone is not None: 1164 zone.pop() 1165 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1166 1167 nano = expression.args.get("nano") 1168 if nano is not None: 1169 nano.pop() 1170 self.unsupported("Specifying nanoseconds 
is not supported in DATETIMEFROMPARTS.") 1171 1172 if expression.args.get("milli") is None: 1173 expression.set("milli", exp.Literal.number(0)) 1174 1175 return rename_func("DATETIMEFROMPARTS")(self, expression) 1176 1177 def setitem_sql(self, expression: exp.SetItem) -> str: 1178 this = expression.this 1179 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1180 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1181 return f"{self.sql(this.left)} {self.sql(this.right)}" 1182 1183 return super().setitem_sql(expression) 1184 1185 def boolean_sql(self, expression: exp.Boolean) -> str: 1186 if type(expression.parent) in BIT_TYPES or isinstance( 1187 expression.find_ancestor(exp.Values, exp.Select), exp.Values 1188 ): 1189 return "1" if expression.this else "0" 1190 1191 return "(1 = 1)" if expression.this else "(1 = 0)" 1192 1193 def is_sql(self, expression: exp.Is) -> str: 1194 if isinstance(expression.expression, exp.Boolean): 1195 return self.binary(expression, "=") 1196 return self.binary(expression, "IS") 1197 1198 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1199 sql = self.sql(expression, "this") 1200 properties = expression.args.get("properties") 1201 1202 if sql[:1] != "#" and any( 1203 isinstance(prop, exp.TemporaryProperty) 1204 for prop in (properties.expressions if properties else []) 1205 ): 1206 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1207 1208 return sql 1209 1210 def create_sql(self, expression: exp.Create) -> str: 1211 kind = expression.kind 1212 exists = expression.args.pop("exists", None) 1213 1214 like_property = expression.find(exp.LikeProperty) 1215 if like_property: 1216 ctas_expression = like_property.this 1217 else: 1218 ctas_expression = expression.expression 1219 1220 if kind == "VIEW": 1221 expression.this.set("catalog", None) 1222 with_ = expression.args.get("with") 1223 if ctas_expression and with_: 1224 # We've already preprocessed 
the Create expression to bubble up any nested CTEs, 1225 # but CREATE VIEW actually requires the WITH clause to come after it so we need 1226 # to amend the AST by moving the CTEs to the CREATE VIEW statement's query. 1227 ctas_expression.set("with", with_.pop()) 1228 1229 table = expression.find(exp.Table) 1230 1231 # Convert CTAS statement to SELECT .. INTO .. 1232 if kind == "TABLE" and ctas_expression: 1233 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 1234 ctas_expression = ctas_expression.subquery() 1235 1236 properties = expression.args.get("properties") or exp.Properties() 1237 is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions) 1238 1239 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 1240 select_into.set("into", exp.Into(this=table, temporary=is_temp)) 1241 1242 if like_property: 1243 select_into.limit(0, copy=False) 1244 1245 sql = self.sql(select_into) 1246 else: 1247 sql = super().create_sql(expression) 1248 1249 if exists: 1250 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 1251 sql_with_ctes = self.prepend_ctes(expression, sql) 1252 sql_literal = self.sql(exp.Literal.string(sql_with_ctes)) 1253 if kind == "SCHEMA": 1254 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})""" 1255 elif kind == "TABLE": 1256 assert table 1257 where = exp.and_( 1258 exp.column("TABLE_NAME").eq(table.name), 1259 exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None, 1260 exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None, 1261 ) 1262 return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})""" 1263 elif kind == "INDEX": 1264 index = self.sql(exp.Literal.string(expression.this.text("this"))) 1265 return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) 
EXEC({sql_literal})""" 1266 elif expression.args.get("replace"): 1267 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 1268 1269 return self.prepend_ctes(expression, sql) 1270 1271 @generator.unsupported_args("unlogged", "expressions") 1272 def into_sql(self, expression: exp.Into) -> str: 1273 if expression.args.get("temporary"): 1274 # If the Into expression has a temporary property, push this down to the Identifier 1275 table = expression.find(exp.Table) 1276 if table and isinstance(table.this, exp.Identifier): 1277 table.this.set("temporary", True) 1278 1279 return f"{self.seg('INTO')} {self.sql(expression, 'this')}" 1280 1281 def count_sql(self, expression: exp.Count) -> str: 1282 func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT" 1283 return rename_func(func_name)(self, expression) 1284 1285 def offset_sql(self, expression: exp.Offset) -> str: 1286 return f"{super().offset_sql(expression)} ROWS" 1287 1288 def version_sql(self, expression: exp.Version) -> str: 1289 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 1290 this = f"FOR {name}" 1291 expr = expression.expression 1292 kind = expression.text("kind") 1293 if kind in ("FROM", "BETWEEN"): 1294 args = expr.expressions 1295 sep = "TO" if kind == "FROM" else "AND" 1296 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 1297 else: 1298 expr_sql = self.sql(expr) 1299 1300 expr_sql = f" {expr_sql}" if expr_sql else "" 1301 return f"{this} {kind}{expr_sql}" 1302 1303 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 1304 table = expression.args.get("table") 1305 table = f"{table} " if table else "" 1306 return f"RETURNS {table}{self.sql(expression, 'this')}" 1307 1308 def returning_sql(self, expression: exp.Returning) -> str: 1309 into = self.sql(expression, "into") 1310 into = self.seg(f"INTO {into}") if into else "" 1311 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1312 
1313 def transaction_sql(self, expression: exp.Transaction) -> str: 1314 this = self.sql(expression, "this") 1315 this = f" {this}" if this else "" 1316 mark = self.sql(expression, "mark") 1317 mark = f" WITH MARK {mark}" if mark else "" 1318 return f"BEGIN TRANSACTION{this}{mark}" 1319 1320 def commit_sql(self, expression: exp.Commit) -> str: 1321 this = self.sql(expression, "this") 1322 this = f" {this}" if this else "" 1323 durability = expression.args.get("durability") 1324 durability = ( 1325 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1326 if durability is not None 1327 else "" 1328 ) 1329 return f"COMMIT TRANSACTION{this}{durability}" 1330 1331 def rollback_sql(self, expression: exp.Rollback) -> str: 1332 this = self.sql(expression, "this") 1333 this = f" {this}" if this else "" 1334 return f"ROLLBACK TRANSACTION{this}" 1335 1336 def identifier_sql(self, expression: exp.Identifier) -> str: 1337 identifier = super().identifier_sql(expression) 1338 1339 if expression.args.get("global"): 1340 identifier = f"##{identifier}" 1341 elif expression.args.get("temporary"): 1342 identifier = f"#{identifier}" 1343 1344 return identifier 1345 1346 def constraint_sql(self, expression: exp.Constraint) -> str: 1347 this = self.sql(expression, "this") 1348 expressions = self.expressions(expression, flat=True, sep=" ") 1349 return f"CONSTRAINT {this} {expressions}" 1350 1351 def length_sql(self, expression: exp.Length) -> str: 1352 return self._uncast_text(expression, "LEN") 1353 1354 def right_sql(self, expression: exp.Right) -> str: 1355 return self._uncast_text(expression, "RIGHT") 1356 1357 def left_sql(self, expression: exp.Left) -> str: 1358 return self._uncast_text(expression, "LEFT") 1359 1360 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1361 this = expression.this 1362 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1363 this_sql = self.sql(this, "this") 1364 else: 1365 this_sql = self.sql(this) 
1366 expression_sql = self.sql(expression, "expression") 1367 return self.func(name, this_sql, expression_sql if expression_sql else None) 1368 1369 def partition_sql(self, expression: exp.Partition) -> str: 1370 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1371 1372 def alter_sql(self, expression: exp.Alter) -> str: 1373 action = seq_get(expression.args.get("actions") or [], 0) 1374 if isinstance(action, exp.AlterRename): 1375 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1376 return super().alter_sql(expression) 1377 1378 def drop_sql(self, expression: exp.Drop) -> str: 1379 if expression.args["kind"] == "VIEW": 1380 expression.this.set("catalog", None) 1381 return super().drop_sql(expression) 1382 1383 def options_modifier(self, expression: exp.Expression) -> str: 1384 options = self.expressions(expression, key="options") 1385 return f" OPTION{self.wrap(options)}" if options else "" 1386 1387 def dpipe_sql(self, expression: exp.DPipe) -> str: 1388 return self.sql( 1389 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1390 ) 1391 1392 def isascii_sql(self, expression: exp.IsAscii) -> str: 1393 return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)" 1394 1395 def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str: 1396 this = super().columndef_sql(expression, sep) 1397 default = self.sql(expression, "default") 1398 default = f" = {default}" if default else "" 1399 output = self.sql(expression, "output") 1400 output = f" {output}" if output else "" 1401 return f"{this}{default}{output}" 1402 1403 def coalesce_sql(self, expression: exp.Coalesce) -> str: 1404 func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE" 1405 return rename_func(func_name)(self, expression)
Whether the base argument comes first in the LOG function.
Possible values: True, False, None (None means the two-argument form of LOG is not supported).
Whether the behavior of `a / b` depends on the types of `a` and `b`.
False means `a / b` is always float division.
True means `a / b` is integer division when both `a` and `b` are integers.
A NULL arg in CONCAT yields NULL by default, but in some dialects it yields an empty string.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime formats.
class Tokenizer(tokens.Tokenizer):
    # T-SQL brackets ([...]) and double quotes both delimit identifiers
    IDENTIFIERS = [("[", "]"), '"']
    QUOTES = ["'", '"']
    HEX_STRINGS = [("0x", ""), ("0X", "")]
    VAR_SINGLE_TOKENS = {"@", "$", "#"}

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "CLUSTERED INDEX": TokenType.INDEX,
        "DATETIME2": TokenType.DATETIME2,
        "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
        "DECLARE": TokenType.DECLARE,
        "EXEC": TokenType.COMMAND,
        "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
        "GO": TokenType.COMMAND,
        "IMAGE": TokenType.IMAGE,
        "MONEY": TokenType.MONEY,
        "NONCLUSTERED INDEX": TokenType.INDEX,
        "NTEXT": TokenType.TEXT,
        "OPTION": TokenType.OPTION,
        "OUTPUT": TokenType.RETURNING,
        "PRINT": TokenType.COMMAND,
        "PROC": TokenType.PROCEDURE,
        "REAL": TokenType.FLOAT,
        "ROWVERSION": TokenType.ROWVERSION,
        "SMALLDATETIME": TokenType.SMALLDATETIME,
        "SMALLMONEY": TokenType.SMALLMONEY,
        "SQL_VARIANT": TokenType.VARIANT,
        "SYSTEM_USER": TokenType.CURRENT_USER,
        "TOP": TokenType.TOP,
        "TIMESTAMP": TokenType.ROWVERSION,
        "TINYINT": TokenType.UTINYINT,
        "UNIQUEIDENTIFIER": TokenType.UUID,
        "UPDATE STATISTICS": TokenType.COMMAND,
        "XML": TokenType.XML,
    }
    # T-SQL has no Oracle-style hint comments
    KEYWORDS.pop("/*+")

    COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- STRING_ESCAPES
- IDENTIFIER_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- HINT_START
- TOKENS_PRECEDING_HINT
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- use_rs_tokenizer
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
class Parser(parser.Parser):
    SET_REQUIRES_ASSIGNMENT_DELIMITER = False
    LOG_DEFAULTS_TO_LN = True
    STRING_ALIASES = True
    NO_PAREN_IF_COMMANDS = False

    QUERY_MODIFIER_PARSERS = {
        **parser.Parser.QUERY_MODIFIER_PARSERS,
        TokenType.OPTION: lambda self: ("options", self._parse_options()),
        TokenType.FOR: lambda self: ("for", self._parse_for()),
    }

    # T-SQL does not allow BEGIN to be used as an identifier
    ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.BEGIN}
    ALIAS_TOKENS = parser.Parser.ALIAS_TOKENS - {TokenType.BEGIN}
    TABLE_ALIAS_TOKENS = parser.Parser.TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
    COMMENT_TABLE_ALIAS_TOKENS = parser.Parser.COMMENT_TABLE_ALIAS_TOKENS - {TokenType.BEGIN}
    UPDATE_ALIAS_TOKENS = parser.Parser.UPDATE_ALIAS_TOKENS - {TokenType.BEGIN}

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        "CHARINDEX": lambda args: exp.StrPosition(
            this=seq_get(args, 1),
            substr=seq_get(args, 0),
            position=seq_get(args, 2),
        ),
        "COUNT": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=False
        ),
        "COUNT_BIG": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=True
        ),
        "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
        "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
        "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
        "DATEPART": _build_formatted_time(exp.TimeToStr),
        "DATETIMEFROMPARTS": _build_datetimefromparts,
        "EOMONTH": _build_eomonth,
        "FORMAT": _build_format,
        "GETDATE": exp.CurrentTimestamp.from_arg_list,
        "HASHBYTES": _build_hashbytes,
        "ISNULL": lambda args: build_coalesce(args=args, is_null=True),
        "JSON_QUERY": _build_json_query,
        "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
        "LEN": _build_with_arg_as_text(exp.Length),
        "LEFT": _build_with_arg_as_text(exp.Left),
        "NEWID": exp.Uuid.from_arg_list,
        "RIGHT": _build_with_arg_as_text(exp.Right),
        "PARSENAME": _build_parsename,
        "REPLICATE": exp.Repeat.from_arg_list,
        "SCHEMA_NAME": exp.CurrentSchema.from_arg_list,
        "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
        "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
        "SUSER_NAME": exp.CurrentUser.from_arg_list,
        "SUSER_SNAME": exp.CurrentUser.from_arg_list,
        "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list,
        "SYSTEM_USER": exp.CurrentUser.from_arg_list,
        "TIMEFROMPARTS": _build_timefromparts,
        "DATETRUNC": _build_datetrunc,
    }

    JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

    PROCEDURE_OPTIONS = dict.fromkeys(
        ("ENCRYPTION", "RECOMPILE", "SCHEMABINDING", "NATIVE_COMPILATION", "EXECUTE"), tuple()
    )

    COLUMN_DEFINITION_MODES = {"OUT", "OUTPUT", "READONLY"}

    RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
        TokenType.TABLE,
        *parser.Parser.TYPE_TOKENS,
    }

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.DECLARE: lambda self: self._parse_declare(),
    }

    RANGE_PARSERS = {
        **parser.Parser.RANGE_PARSERS,
        TokenType.DCOLON: lambda self, this: self.expression(
            exp.ScopeResolution,
            this=this,
            expression=self._parse_function() or self._parse_var(any_token=True),
        ),
    }

    NO_PAREN_FUNCTION_PARSERS = {
        **parser.Parser.NO_PAREN_FUNCTION_PARSERS,
        "NEXT": lambda self: self._parse_next_value_for(),
    }

    FUNCTION_PARSERS: t.Dict[str, t.Callable] = {
        **parser.Parser.FUNCTION_PARSERS,
        "JSON_ARRAYAGG": lambda self: self.expression(
            exp.JSONArrayAgg,
            this=self._parse_bitwise(),
            order=self._parse_order(),
            null_handling=self._parse_on_handling("NULL", "NULL", "ABSENT"),
        ),
    }

    # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
    COLUMN_OPERATORS = {
        **parser.Parser.COLUMN_OPERATORS,
        TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
        if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
        else self.expression(exp.ScopeResolution, this=this, expression=to),
    }

    SET_OP_MODIFIERS = {"offset"}

    def _parse_alter_table_set(self) -> exp.AlterSet:
        return self._parse_wrapped(super()._parse_alter_table_set)

    def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
        if self._match(TokenType.MERGE):
            comments = self._prev_comments
            merge = self._parse_merge()
            merge.add_comments(comments, prepend=True)
            return merge

        return super()._parse_wrapped_select(table=table)

    def _parse_dcolon(self) -> t.Optional[exp.Expression]:
        # We want to use _parse_types() if the first token after :: is a known type,
        # otherwise we could parse something like x::varchar(max) into a function
        if self._match_set(self.TYPE_TOKENS, advance=False):
            return self._parse_types()

        return self._parse_function() or self._parse_types()

    def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
        if not self._match(TokenType.OPTION):
            return None

        def _parse_option() -> t.Optional[exp.Expression]:
            option = self._parse_var_from_options(OPTIONS)
            if not option:
                return None

            self._match(TokenType.EQ)
            return self.expression(
                exp.QueryOption, this=option, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(_parse_option)

    def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
        this = self._parse_primary_or_var()
        if self._match(TokenType.L_PAREN, advance=False):
            expression = self._parse_wrapped(self._parse_string)
        else:
            expression = None

        return exp.XMLKeyValueOption(this=this, expression=expression)

    def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
        if not self._match_pair(TokenType.FOR, TokenType.XML):
            return None

        def _parse_for_xml() -> t.Optional[exp.Expression]:
            return self.expression(
                exp.QueryOption,
                this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
                or self._parse_xml_key_value_option(),
            )

        return self._parse_csv(_parse_for_xml)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        return [
            (
                exp.alias_(projection.expression, projection.this.this, copy=False)
                if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                else projection
            )
            for projection in super()._parse_projections()
        ]

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
        [ transaction_name | @tran_name_variable ] ]
        [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
        [ transaction_name | @tran_name_variable
        | savepoint_name | @savepoint_variable ]
        """
        rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        this = self._parse_id_var()

        if rollback:
            return self.expression(exp.Rollback, this=this)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
        [ { transaction_name | @tran_name_variable }
        [ WITH MARK [ 'description' ] ]
        ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        # RETURNS may name a table variable before the type, e.g. RETURNS @t TABLE (...)
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        # CONVERT(<type>, <expr> [, <style>]) -- the target type comes first in T-SQL
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_assignment)]
        convert = exp.Convert.from_arg_list(args)
        convert.set("safe", safe)
        convert.set("strict", strict)
        return convert

    def _parse_column_def(
        self, this: t.Optional[exp.Expression], computed_column: bool = True
    ) -> t.Optional[exp.Expression]:
        this = super()._parse_column_def(this=this, computed_column=computed_column)
        if not this:
            return None
        if self._match(TokenType.EQ):
            this.set("default", self._parse_disjunction())
        if self._match_texts(self.COLUMN_DEFINITION_MODES):
            this.set("output", self._prev.text)
        return this

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        if not self._match(TokenType.WITH, advance=False):
            expressions = self._parse_csv(self._parse_function_parameter)
        else:
            expressions = None

        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_into(self) -> t.Optional[exp.Into]:
        into = super()._parse_into()

        table = isinstance(into, exp.Into) and into.find(exp.Table)
        if isinstance(table, exp.Table):
            table_identifier = table.this
            if table_identifier.args.get("temporary"):
                # Promote the temporary property from the Identifier to the Into expression
                t.cast(exp.Into, into).set("temporary", True)

        return into

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        # '#name' is a local temp object, '##name' is a global one
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this and table.this.args.get("temporary"):
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        index = self._index

        # IF OBJECT_ID('name') IS NOT NULL DROP ... is the idiomatic "drop if exists"
        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)
            self._retreat(index)

        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            this = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=this)

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_range():
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            return (
                self.expression(exp.PartitionRange, this=low, expression=high) if high else low
            )

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        var = self._parse_id_var()
        if not var:
            return None

        self._match(TokenType.ALIAS)
        return self.expression(
            exp.DeclareItem,
            this=var,
            kind=self._parse_schema() if self._match(TokenType.TABLE) else self._parse_types(),
            default=self._match(TokenType.EQ) and self._parse_bitwise(),
        )

    def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
        expression = super()._parse_alter_table_alter()

        if expression is not None:
            collation = expression.args.get("collate")
            if isinstance(collation, exp.Column) and isinstance(collation.this, exp.Identifier):
                identifier = collation.this
                collation.set("this", exp.Var(this=identifier.name))

        return expression
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- COLON_PLACEHOLDER_TOKENS
- ARRAY_CONSTRUCTORS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- CAST_COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PIPE_SYNTAX_TRANSFORM_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- QUERY_MODIFIER_TOKENS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- EXECUTE_AS_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- WINDOW_EXCLUDE_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- ANALYZE_STYLES
- ANALYZE_EXPRESSION_PARSERS
- PARTITION_KEYWORDS
- AMBIGUOUS_ALIAS_TOKENS
- OPERATION_MODIFIERS
- RECURSIVE_CTE_SEARCH_KIND
- MODIFIABLES
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- WRAPPED_TRANSFORM_COLUMN_CONSTRAINT
- OPTIONAL_ALIAS_TOKEN_CTE
- ALTER_RENAME_REQUIRES_COLUMN
- JOINS_HAVE_EQUAL_PRECEDENCE
- ZONE_AWARE_TIMESTAMP_CONSTRUCTOR
- MAP_KEYS_ARE_ARBITRARY_EXPRESSIONS
- JSON_EXTRACT_REQUIRES_JSON_EXPRESSION
- ADD_JOIN_ON_TRUE
- SUPPORTS_OMITTED_INTERVAL_SPAN_UNIT
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- parse_set_operation
- build_cast
- errors
- sql
class Generator(generator.Generator):
    """Generates T-SQL from a sqlglot syntax tree."""

    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
    ALTER_SET_WRAPPED = True
    ALTER_SET_TYPE = ""

    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DATETIME2: "DATETIME2",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.SMALLDATETIME: "SMALLDATETIME",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
        exp.DataType.Type.UUID: "UNIQUEIDENTIFIER",
    }

    # NCHAR / NVARCHAR are kept as-is rather than remapped
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.Chr: rename_func("CHAR"),
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Ln: rename_func("LOG"),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.CurrentSchema: rename_func("SCHEMA_NAME"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: strposition_sql(
            self, e, func_name="CHARINDEX", supports_position=True
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        exp.TimestampTrunc: lambda self, e: self.func("DATETRUNC", e.unit, e.this),
        exp.Uuid: lambda *_: "NEWID()",
        exp.DateFromParts: rename_func("DATEFROMPARTS"),
    }

    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        limit = expression.args.get("limit")
        offset = expression.args.get("offset")

        if isinstance(limit, exp.Fetch) and not offset:
            # Dialects like Oracle can FETCH directly from a row set but
            # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH
            offset = exp.Offset(expression=exp.Literal.number(0))
            expression.set("offset", offset)

        if offset:
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        # TRY_CONVERT returns NULL on failure instead of raising
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if not value:
            return option
        # Some OPTION(...) settings require an '=' between name and value
        optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
        return f"{option} {optional_equal_sign}{value}"

    def lateral_op(self, expression: exp.Lateral) -> str:
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        subject = expression.this
        split_count = len(subject.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        # PARSENAME only splits on '.', takes literals, and handles at most 4 parts
        if (
            not all(isinstance(arg, exp.Literal) for arg in (subject, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        # PARSENAME counts parts from the right, SPLIT_PART from the left
        return self.func(
            "PARSENAME", subject, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        # TIMEFROMPARTS requires all arguments, so default the missing ones to 0
        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        # Inside bit-typed contexts (comparisons, IN, SELECT projections, VALUES)
        # booleans render as bare 1/0; elsewhere as a tautology/contradiction.
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        # `x IS TRUE` has no T-SQL equivalent; compare against the bit value
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")
        property_list = properties.expressions if properties else []

        # Temporary tables must carry the '#' prefix on the name itself
        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty) for prop in property_list
        ):
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            properties = expression.args.get("properties") or exp.Properties()
            is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table, temporary=is_temp))

            if like_property:
                # CREATE TABLE ... LIKE copies only the schema, so select zero rows
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)
        else:
            sql = super().create_sql(expression)

        if exists:
            # Emulate IF NOT EXISTS by probing the catalog views and EXEC'ing the DDL
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql_with_ctes = self.prepend_ctes(expression, sql)
            sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
            if kind == "SCHEMA":
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("TABLE_NAME").eq(table.name),
                    exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                    exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
                )
                return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    @generator.unsupported_args("unlogged", "expressions")
    def into_sql(self, expression: exp.Into) -> str:
        if expression.args.get("temporary"):
            # If the Into expression has a temporary property, push this down to the Identifier
            table = expression.find(exp.Table)
            if table and isinstance(table.this, exp.Identifier):
                table.this.set("temporary", True)

        return f"{self.seg('INTO')} {self.sql(expression, 'this')}"

    def count_sql(self, expression: exp.Count) -> str:
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        # T-SQL requires the ROWS keyword after OFFSET <n>
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        # RETURNING maps to T-SQL's OUTPUT [INTO ...] clause
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        identifier = super().identifier_sql(expression)

        # Restore the global-temp/local-temp prefixes stripped at parse time
        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return identifier

    def constraint_sql(self, expression: exp.Constraint) -> str:
        this = self.sql(expression, "this")
        expressions = self.expressions(expression, flat=True, sep=" ")
        return f"CONSTRAINT {this} {expressions}"

    def length_sql(self, expression: exp.Length) -> str:
        return self._uncast_text(expression, "LEN")

    def right_sql(self, expression: exp.Right) -> str:
        return self._uncast_text(expression, "RIGHT")

    def left_sql(self, expression: exp.Left) -> str:
        return self._uncast_text(expression, "LEFT")

    def _uncast_text(self, expression: exp.Expression, name: str) -> str:
        # These string functions accept any character type, so drop redundant TEXT casts
        this = expression.this
        if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
            this_sql = self.sql(this, "this")
        else:
            this_sql = self.sql(this)
        expression_sql = self.sql(expression, "expression")
        return self.func(name, this_sql, expression_sql if expression_sql else None)

    def partition_sql(self, expression: exp.Partition) -> str:
        return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

    def alter_sql(self, expression: exp.Alter) -> str:
        first_action = seq_get(expression.args.get("actions") or [], 0)
        if isinstance(first_action, exp.AlterRename):
            # Renames are done via the sp_rename stored procedure, not ALTER
            return f"EXEC sp_rename '{self.sql(expression.this)}', '{first_action.this.name}'"
        return super().alter_sql(expression)

    def drop_sql(self, expression: exp.Drop) -> str:
        if expression.args["kind"] == "VIEW":
            expression.this.set("catalog", None)
        return super().drop_sql(expression)

    def options_modifier(self, expression: exp.Expression) -> str:
        options = self.expressions(expression, key="options")
        return f" OPTION{self.wrap(options)}" if options else ""

    def dpipe_sql(self, expression: exp.DPipe) -> str:
        # T-SQL concatenates strings with '+', so fold || operands into Adds
        operands = list(expression.flatten())
        combined = operands[0]
        for operand in operands[1:]:
            combined = exp.Add(this=combined, expression=operand)
        return self.sql(combined)

    def isascii_sql(self, expression: exp.IsAscii) -> str:
        return f"(PATINDEX(CONVERT(VARCHAR(MAX), 0x255b5e002d7f5d25) COLLATE Latin1_General_BIN, {self.sql(expression.this)}) = 0)"

    def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
        this = super().columndef_sql(expression, sep)
        default = self.sql(expression, "default")
        default = f" = {default}" if default else ""
        output = self.sql(expression, "output")
        output = f" {output}" if output else ""
        return f"{this}{default}{output}"

    def coalesce_sql(self, expression: exp.Coalesce) -> str:
        func_name = "ISNULL" if expression.args.get("is_null") else "COALESCE"
        return rename_func(func_name)(self, expression)
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
1078 def select_sql(self, expression: exp.Select) -> str: 1079 limit = expression.args.get("limit") 1080 offset = expression.args.get("offset") 1081 1082 if isinstance(limit, exp.Fetch) and not offset: 1083 # Dialects like Oracle can FETCH directly from a row set but 1084 # T-SQL requires an ORDER BY + OFFSET clause in order to FETCH 1085 offset = exp.Offset(expression=exp.Literal.number(0)) 1086 expression.set("offset", offset) 1087 1088 if offset: 1089 if not expression.args.get("order"): 1090 # ORDER BY is required in order to use OFFSET in a query, so we use 1091 # a noop order by, since we don't really care about the order. 1092 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 1093 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 1094 1095 if isinstance(limit, exp.Limit): 1096 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 1097 # we replace here because otherwise TOP would be generated in select_sql 1098 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 1099 1100 return super().select_sql(expression)
1108 def queryoption_sql(self, expression: exp.QueryOption) -> str: 1109 option = self.sql(expression, "this") 1110 value = self.sql(expression, "expression") 1111 if value: 1112 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 1113 return f"{option} {optional_equal_sign}{value}" 1114 return option
1116 def lateral_op(self, expression: exp.Lateral) -> str: 1117 cross_apply = expression.args.get("cross_apply") 1118 if cross_apply is True: 1119 return "CROSS APPLY" 1120 if cross_apply is False: 1121 return "OUTER APPLY" 1122 1123 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 1124 self.unsupported("LATERAL clause is not supported.") 1125 return "LATERAL"
1127 def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str: 1128 this = expression.this 1129 split_count = len(this.name.split(".")) 1130 delimiter = expression.args.get("delimiter") 1131 part_index = expression.args.get("part_index") 1132 1133 if ( 1134 not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)) 1135 or (delimiter and delimiter.name != ".") 1136 or not part_index 1137 or split_count > 4 1138 ): 1139 self.unsupported( 1140 "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values" 1141 ) 1142 return "" 1143 1144 return self.func( 1145 "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py()) 1146 )
1148 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 1149 nano = expression.args.get("nano") 1150 if nano is not None: 1151 nano.pop() 1152 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 1153 1154 if expression.args.get("fractions") is None: 1155 expression.set("fractions", exp.Literal.number(0)) 1156 if expression.args.get("precision") is None: 1157 expression.set("precision", exp.Literal.number(0)) 1158 1159 return rename_func("TIMEFROMPARTS")(self, expression)
1161 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 1162 zone = expression.args.get("zone") 1163 if zone is not None: 1164 zone.pop() 1165 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 1166 1167 nano = expression.args.get("nano") 1168 if nano is not None: 1169 nano.pop() 1170 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 1171 1172 if expression.args.get("milli") is None: 1173 expression.set("milli", exp.Literal.number(0)) 1174 1175 return rename_func("DATETIMEFROMPARTS")(self, expression)
1177 def setitem_sql(self, expression: exp.SetItem) -> str: 1178 this = expression.this 1179 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 1180 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 1181 return f"{self.sql(this.left)} {self.sql(this.right)}" 1182 1183 return super().setitem_sql(expression)
1198 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 1199 sql = self.sql(expression, "this") 1200 properties = expression.args.get("properties") 1201 1202 if sql[:1] != "#" and any( 1203 isinstance(prop, exp.TemporaryProperty) 1204 for prop in (properties.expressions if properties else []) 1205 ): 1206 sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}" 1207 1208 return sql
def create_sql(self, expression: exp.Create) -> str:
    """Generate T-SQL for a CREATE statement.

    Handles several T-SQL-specific rewrites, mutating `expression` in place:
    - CREATE VIEW: drops the catalog qualifier and moves any WITH (CTE)
      clause onto the view's query, since T-SQL requires it after CREATE VIEW.
    - CTAS (CREATE TABLE ... AS SELECT): rewritten as SELECT ... INTO ...
    - IF NOT EXISTS: emulated with an IF NOT EXISTS (...) EXEC('...') guard
      for SCHEMA, TABLE and INDEX kinds.
    - CREATE OR REPLACE: rewritten as CREATE OR ALTER.
    """
    kind = expression.kind
    # Pop "exists" so the base generator doesn't try to render IF NOT EXISTS.
    exists = expression.args.pop("exists", None)

    # CREATE TABLE ... LIKE other_table is modeled as a LikeProperty; its
    # target query doubles as the CTAS expression (limited to 0 rows below).
    like_property = expression.find(exp.LikeProperty)
    if like_property:
        ctas_expression = like_property.this
    else:
        ctas_expression = expression.expression

    if kind == "VIEW":
        # T-SQL views cannot be created with a catalog-qualified name.
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with", with_.pop())

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            # SELECT/UNION etc. must be wrapped before being used in a FROM clause.
            ctas_expression = ctas_expression.subquery()

        properties = expression.args.get("properties") or exp.Properties()
        is_temp = any(isinstance(p, exp.TemporaryProperty) for p in properties.expressions)

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table, temporary=is_temp))

        if like_property:
            # CREATE TABLE LIKE copies only the structure: take zero rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)
    else:
        sql = super().create_sql(expression)

    if exists:
        # Emulate IF NOT EXISTS by probing the catalog views and running the
        # CREATE statement through EXEC('...') only when the object is absent.
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql_with_ctes = self.prepend_ctes(expression, sql)
        sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
        if kind == "SCHEMA":
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
        elif kind == "TABLE":
            assert table
            # Only constrain on schema/catalog when they are actually present.
            where = exp.and_(
                exp.column("TABLE_NAME").eq(table.name),
                exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
                exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
            )
            return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
    elif expression.args.get("replace"):
        # T-SQL spells CREATE OR REPLACE as CREATE OR ALTER.
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
@generator.unsupported_args("unlogged", "expressions")
def into_sql(self, expression: exp.Into) -> str:
    """Render an INTO clause, pushing a temporary flag down onto the target
    table's identifier so it gets the '#' prefix."""
    if expression.args.get("temporary"):
        # If the Into expression has a temporary property, push this down to the Identifier
        target = expression.find(exp.Table)
        target_name = target.this if target else None
        if isinstance(target_name, exp.Identifier):
            target_name.set("temporary", True)

    return f"{self.seg('INTO')} {self.sql(expression, 'this')}"
def version_sql(self, expression: exp.Version) -> str:
    """Render a temporal-table clause, e.g. FOR SYSTEM_TIME AS OF / FROM ... TO /
    BETWEEN ... AND."""
    # T-SQL uses SYSTEM_TIME where other dialects say TIMESTAMP.
    version_name = expression.name
    if version_name == "TIMESTAMP":
        version_name = "SYSTEM_TIME"

    kind = expression.text("kind")
    operand = expression.expression

    if kind in ("FROM", "BETWEEN"):
        # Two-endpoint forms: FROM x TO y / BETWEEN x AND y.
        endpoints = operand.expressions
        joiner = "TO" if kind == "FROM" else "AND"
        lo = self.sql(seq_get(endpoints, 0))
        hi = self.sql(seq_get(endpoints, 1))
        operand_sql = f"{lo} {joiner} {hi}"
    else:
        operand_sql = self.sql(operand)

    suffix = f" {operand_sql}" if operand_sql else ""
    return f"FOR {version_name} {kind}{suffix}"
def commit_sql(self, expression: exp.Commit) -> str:
    """Render COMMIT TRANSACTION, with an optional DELAYED_DURABILITY hint."""
    name_sql = self.sql(expression, "this")
    name_part = f" {name_sql}" if name_sql else ""

    durability = expression.args.get("durability")
    if durability is None:
        durability_part = ""
    else:
        # durability is tri-state: None (omit), truthy (ON), falsy (OFF).
        setting = "ON" if durability else "OFF"
        durability_part = f" WITH (DELAYED_DURABILITY = {setting})"

    return f"COMMIT TRANSACTION{name_part}{durability_part}"
def identifier_sql(self, expression: exp.Identifier) -> str:
    """Render an identifier, applying T-SQL temp-table prefixes:
    '##' for global temporary objects, '#' for local ones."""
    rendered = super().identifier_sql(expression)

    # Global takes precedence over plain temporary.
    if expression.args.get("global"):
        return f"##{rendered}"
    if expression.args.get("temporary"):
        return f"#{rendered}"
    return rendered
def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
    """Render a column definition, appending T-SQL procedure-parameter
    extras: a '= default' clause and an OUTPUT marker, when present."""
    base = super().columndef_sql(expression, sep)

    default_sql = self.sql(expression, "default")
    output_sql = self.sql(expression, "output")

    parts = [base]
    if default_sql:
        parts.append(f" = {default_sql}")
    if output_sql:
        parts.append(f" {output_sql}")
    return "".join(parts)
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- SUPPORTS_WINDOW_EXCLUDE
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- UNICODE_SUBSTITUTE
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- SUPPORTS_MEDIAN
- SUPPORTS_UNIX_SECONDS
- NORMALIZE_EXTRACT_DATE_PARTS
- ARRAY_SIZE_NAME
- ARRAY_SIZE_DIM_REQUIRED
- SUPPORTS_BETWEEN_FLAGS
- SUPPORTS_LIKE_QUANTIFIERS
- MATCH_AGAINST_TABLE_PREFIX
- UNSUPPORTED_TYPES
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- EXPRESSION_PRECEDES_PROPERTIES_CREATABLES
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS
- SAFE_JSON_PATH_KEY_RE
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- sanitize_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- limitoptions_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablefromrows_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- queryband_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- for_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- formatphrase_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterindex_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- altersession_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- addpartition_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- safedivide_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- like_sql
- ilike_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- jsoncast_sql
- try_sql
- log_sql
- use_sql
- binary
- ceil_floor
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- whens_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- uniquekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- generateembedding_sql
- mltranslate_sql
- mlforecast_sql
- featuresattime_sql
- vectorsearch_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodatetime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonextractquote_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- revoke_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql
- todouble_sql
- string_sql
- median_sql
- overflowtruncatebehavior_sql
- unixseconds_sql
- arraysize_sql
- attach_sql
- detach_sql
- attachoption_sql
- watermarkcolumnconstraint_sql
- encodeproperty_sql
- includeproperty_sql
- xmlelement_sql
- xmlkeyvalueoption_sql
- partitionbyrangeproperty_sql
- partitionbyrangepropertydynamic_sql
- unpivotcolumns_sql
- analyzesample_sql
- analyzestatistics_sql
- analyzehistogram_sql
- analyzedelete_sql
- analyzelistchainedrows_sql
- analyzevalidate_sql
- analyze_sql
- xmltable_sql
- xmlnamespace_sql
- export_sql
- declare_sql
- declareitem_sql
- recursivewithsearch_sql
- parameterizedagg_sql
- anonymousaggfunc_sql
- combinedaggfunc_sql
- combinedparameterizedagg_sql
- show_sql
- install_sql
- get_put_sql
- translatecharacters_sql
- decodecase_sql
- semanticview_sql
- getextract_sql
- datefromunixdate_sql
- space_sql
- buildproperty_sql
- refreshtriggerproperty_sql
- modelattribute_sql