sqlglot.dialects.tsql
from __future__ import annotations

import datetime
import re
import typing as t

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    NormalizationStrategy,
    any_value_to_max_sql,
    date_delta_sql,
    generatedasidentitycolumnconstraint_sql,
    max_or_greatest,
    min_or_least,
    build_date_delta,
    rename_func,
    timestrtotime_sql,
    trim_sql,
)
from sqlglot.helper import seq_get
from sqlglot.time import format_time
from sqlglot.tokens import TokenType

if t.TYPE_CHECKING:
    from sqlglot._typing import E

FULL_FORMAT_TIME_MAPPING = {
    "weekday": "%A",
    "dw": "%A",
    "w": "%A",
    "month": "%B",
    "mm": "%B",
    "m": "%B",
}

DATE_DELTA_INTERVAL = {
    "year": "year",
    "yyyy": "year",
    "yy": "year",
    "quarter": "quarter",
    "qq": "quarter",
    "q": "quarter",
    "month": "month",
    "mm": "month",
    "m": "month",
    "week": "week",
    "ww": "week",
    "wk": "week",
    "day": "day",
    "dd": "day",
    "d": "day",
}


DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

# N = Numeric, C = Currency
TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}

DEFAULT_START_DATE = datetime.date(1900, 1, 1)

BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}

# Unsupported options:
# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
# - TABLE HINT
OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "DISABLE_OPTIMIZED_PLAN_FORCING",
            "FAST",
            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
            "LABEL",
            "MAXDOP",
            "MAXRECURSION",
            "MAX_GRANT_PERCENT",
            "MIN_GRANT_PERCENT",
            "NO_PERFORMANCE_SPOOL",
            "QUERYTRACEON",
            "RECOMPILE",
        ),
        tuple(),
    ),
    "CONCAT": ("UNION",),
    "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
    "EXPAND": ("VIEWS",),
    "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
    "HASH": ("GROUP", "JOIN", "UNION"),
    "KEEP": ("PLAN",),
    "KEEPFIXED": ("PLAN",),
    "LOOP": ("JOIN",),
    "MERGE": ("JOIN", "UNION"),
    "OPTIMIZE": (("FOR", "UNKNOWN"),),
    "ORDER": ("GROUP",),
    "PARAMETERIZATION": ("FORCED", "SIMPLE"),
    "ROBUST": ("PLAN",),
    "USE": ("PLAN",),
}

OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")


def _build_formatted_time(
    exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
) -> t.Callable[[t.List], E]:
    def _builder(args: t.List) -> E:
        assert len(args) == 2

        return exp_class(
            this=exp.cast(args[1], "datetime"),
            format=exp.Literal.string(
                format_time(
                    args[0].name.lower(),
                    (
                        {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
                        if full_format_mapping
                        else TSQL.TIME_MAPPING
                    ),
                )
            ),
        )

    return _builder


def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
    this = seq_get(args, 0)
    fmt = seq_get(args, 1)
    culture = seq_get(args, 2)

    number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))

    if number_fmt:
        return exp.NumberToStr(this=this, format=fmt, culture=culture)

    if fmt:
        fmt = exp.Literal.string(
            format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
            if len(fmt.name) == 1
            else format_time(fmt.name, TSQL.TIME_MAPPING)
        )

    return exp.TimeToStr(this=this, format=fmt, culture=culture)


def _build_eomonth(args: t.List) -> exp.LastDay:
    date = exp.TsOrDsToDate(this=seq_get(args, 0))
    month_lag = seq_get(args, 1)

    if month_lag is None:
        this: exp.Expression = date
    else:
        unit = DATE_DELTA_INTERVAL.get("month")
        this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=this)


def _build_hashbytes(args: t.List) -> exp.Expression:
    kind, data = args
    kind = kind.name.upper() if kind.is_string else ""

    if kind == "MD5":
        args.pop(0)
        return exp.MD5(this=data)
    if kind in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)
    if kind == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))
    if kind == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    return exp.func("HASHBYTES", *args)


DATEPART_ONLY_FORMATS = {"DW", "HOUR", "QUARTER"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    fmt = expression.args["format"]

    if not isinstance(expression, exp.NumberToStr):
        if fmt.is_string:
            mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

            name = (mapped_fmt or "").upper()
            if name in DATEPART_ONLY_FORMATS:
                return self.func("DATEPART", name, expression.this)

            fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
        else:
            fmt_sql = self.format_time(expression) or self.sql(fmt)
    else:
        fmt_sql = self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    this = expression.this
    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        this = distinct.pop().expressions[0]

    order = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            this = expression.this.this.pop()
        order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"  # Order has a leading space

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(this, separator)}){order}"


def _build_date_delta(
    exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None
) -> t.Callable[[t.List], E]:
    def _builder(args: t.List) -> E:
        unit = seq_get(args, 0)
        if unit and unit_mapping:
            unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name))

        start_date = seq_get(args, 1)
        if start_date and start_date.is_number:
            # Numeric types are valid DATETIME values
            if start_date.is_int:
                adds = DEFAULT_START_DATE + datetime.timedelta(days=int(start_date.this))
                start_date = exp.Literal.string(adds.strftime("%F"))
            else:
                # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs.
                # This is not a problem when generating T-SQL code, it is when transpiling to other dialects.
                return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit)

        return exp_class(
            this=exp.TimeStrToTime(this=seq_get(args, 2)),
            expression=exp.TimeStrToTime(this=start_date),
            unit=unit,
        )

    return _builder


def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries."""
    alias = expression.args.get("alias")

    if (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(alias, exp.TableAlias)
        and not alias.columns
    ):
        from sqlglot.optimizer.qualify_columns import qualify_outputs

        # We keep track of the unaliased column projection indexes instead of the expressions
        # themselves, because the latter are going to be replaced by new nodes when the aliases
        # are added and hence we won't be able to reach these newly added Alias parents
        query = expression.this
        unaliased_column_indexes = (
            i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias
        )

        qualify_outputs(query)

        # Preserve the quoting information of columns for newly added Alias nodes
        query_selects = query.selects
        for select_index in unaliased_column_indexes:
            alias = query_selects[select_index]
            column = alias.this
            if isinstance(column.this, exp.Identifier):
                alias.args["alias"].set("quoted", column.this.quoted)

    return expression


# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts:
    return exp.TimestampFromParts(
        year=seq_get(args, 0),
        month=seq_get(args, 1),
        day=seq_get(args, 2),
        hour=seq_get(args, 3),
        min=seq_get(args, 4),
        sec=seq_get(args, 5),
        milli=seq_get(args, 6),
    )


# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax
def _build_timefromparts(args: t.List) -> exp.TimeFromParts:
    return exp.TimeFromParts(
        hour=seq_get(args, 0),
        min=seq_get(args, 1),
        sec=seq_get(args, 2),
        fractions=seq_get(args, 3),
        precision=seq_get(args, 4),
    )


def _build_with_arg_as_text(
    klass: t.Type[exp.Expression],
) -> t.Callable[[t.List[exp.Expression]], exp.Expression]:
    def _parse(args: t.List[exp.Expression]) -> exp.Expression:
        this = seq_get(args, 0)

        if this and not this.is_string:
            this = exp.cast(this, exp.DataType.Type.TEXT)

        expression = seq_get(args, 1)
        kwargs = {"this": this}

        if expression:
            kwargs["expression"] = expression

        return klass(**kwargs)

    return _parse


def _json_extract_sql(
    self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar
) -> str:
    json_query = self.func("JSON_QUERY", expression.this, expression.expression)
    json_value = self.func("JSON_VALUE", expression.this, expression.expression)
    return self.func("ISNULL", json_query, json_value)


class TSQL(Dialect):
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"
    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True

    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%W",
        "dw": "%W",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "S": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
    }

    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }

    class Tokenizer(tokens.Tokenizer):
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "DATETIME2": TokenType.DATETIME,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.COMMAND,
            "EXEC": TokenType.COMMAND,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NTEXT": TokenType.TEXT,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.DATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "TOP": TokenType.TOP,
            "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
            "OUTPUT": TokenType.RETURNING,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "OPTION": TokenType.OPTION,
        }

    class Parser(parser.Parser):
        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        LOG_DEFAULTS_TO_LN = True
        ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
        STRING_ALIASES = True
        NO_PAREN_IF_COMMANDS = False

        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            TokenType.OPTION: lambda self: ("options", self._parse_options()),
        }

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            "CHARINDEX": lambda args: exp.StrPosition(
                this=seq_get(args, 1),
                substr=seq_get(args, 0),
                position=seq_get(args, 2),
            ),
            "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
            "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
            "DATEPART": _build_formatted_time(exp.TimeToStr),
            "DATETIMEFROMPARTS": _build_datetimefromparts,
            "EOMONTH": _build_eomonth,
            "FORMAT": _build_format,
            "GETDATE": exp.CurrentTimestamp.from_arg_list,
            "HASHBYTES": _build_hashbytes,
            "ISNULL": exp.Coalesce.from_arg_list,
            "JSON_QUERY": parser.build_extract_json_with_path(exp.JSONExtract),
            "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
            "LEN": _build_with_arg_as_text(exp.Length),
            "LEFT": _build_with_arg_as_text(exp.Left),
            "RIGHT": _build_with_arg_as_text(exp.Right),
            "REPLICATE": exp.Repeat.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
            "SUSER_NAME": exp.CurrentUser.from_arg_list,
            "SUSER_SNAME": exp.CurrentUser.from_arg_list,
            "SYSTEM_USER": exp.CurrentUser.from_arg_list,
            "TIMEFROMPARTS": _build_timefromparts,
        }

        JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

        RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
            TokenType.TABLE,
            *parser.Parser.TYPE_TOKENS,
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.END: lambda self: self._parse_command(),
        }

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)

        def _parse_projections(self) -> t.List[exp.Expression]:
            """
            T-SQL supports the syntax alias = expression in the SELECT's projection list,
            so we transform all parsed Selects to convert their EQ projections into Aliases.

            See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
            """
            return [
                (
                    exp.alias_(projection.expression, projection.this.this, copy=False)
                    if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                    else projection
                )
                for projection in super()._parse_projections()
            ]

        def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
            """Applies to SQL Server and Azure SQL Database
            COMMIT [ { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable ] ]
                [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

            ROLLBACK { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable
                | savepoint_name | @savepoint_variable ]
            """
            rollback = self._prev.token_type == TokenType.ROLLBACK

            self._match_texts(("TRAN", "TRANSACTION"))
            this = self._parse_id_var()

            if rollback:
                return self.expression(exp.Rollback, this=this)

            durability = None
            if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
                self._match_text_seq("DELAYED_DURABILITY")
                self._match(TokenType.EQ)

                if self._match_text_seq("OFF"):
                    durability = False
                else:
                    self._match(TokenType.ON)
                    durability = True

                self._match_r_paren()

            return self.expression(exp.Commit, this=this, durability=durability)

        def _parse_transaction(self) -> exp.Transaction | exp.Command:
            """Applies to SQL Server and Azure SQL Database
            BEGIN { TRAN | TRANSACTION }
            [ { transaction_name | @tran_name_variable }
            [ WITH MARK [ 'description' ] ]
            ]
            """
            if self._match_texts(("TRAN", "TRANSACTION")):
                transaction = self.expression(exp.Transaction, this=self._parse_id_var())
                if self._match_text_seq("WITH", "MARK"):
                    transaction.set("mark", self._parse_string())

                return transaction

            return self._parse_as_command(self._prev)

        def _parse_returns(self) -> exp.ReturnsProperty:
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_conjunction)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            convert.set("strict", strict)
            return convert

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            expressions = self._parse_csv(self._parse_function_parameter)
            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this.args.get("temporary"):
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create

        def _parse_if(self) -> t.Optional[exp.Expression]:
            index = self._index

            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    return self._parse_drop(exists=True)
                self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_partition(self) -> t.Optional[exp.Partition]:
            if not self._match_text_seq("WITH", "(", "PARTITIONS"):
                return None

            def parse_range():
                low = self._parse_bitwise()
                high = self._parse_bitwise() if self._match_text_seq("TO") else None

                return (
                    self.expression(exp.PartitionRange, this=low, expression=high) if high else low
                )

            partition = self.expression(
                exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
            )

            self._match_r_paren()

            return partition

    class Generator(generator.Generator):
        LIMIT_IS_TOP = True
        QUERY_HINTS = False
        RETURNING_END = False
        NVL2_SUPPORTED = False
        ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
        LIMIT_FETCH = "FETCH"
        COMPUTED_COLUMN_WITH_TYPE = False
        CTE_RECURSIVE_KEYWORD_REQUIRED = False
        ENSURE_BOOLS = True
        NULL_ORDERING_SUPPORTED = None
        SUPPORTS_SINGLE_ARG_CONCAT = False
        TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
        SUPPORTS_SELECT_INTO = True
        JSON_PATH_BRACKETED_KEY_SUPPORTED = False
        SUPPORTS_TO_NUMBER = False

        EXPRESSIONS_WITHOUT_NESTED_CTES = {
            exp.Delete,
            exp.Insert,
            exp.Merge,
            exp.Select,
            exp.Subquery,
            exp.Union,
            exp.Update,
        }

        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
            exp.JSONPathRoot,
            exp.JSONPathSubscript,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BOOLEAN: "BIT",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DATETIME: "DATETIME2",
            exp.DataType.Type.DOUBLE: "FLOAT",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.TEXT: "VARCHAR(MAX)",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
            exp.DataType.Type.VARIANT: "SQL_VARIANT",
        }

        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.DateDiff: date_delta_sql("DATEDIFF"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.Extract: rename_func("DATEPART"),
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.ParseJSON: lambda self, e: self.sql(e, "this"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                ]
            ),
            exp.StrPosition: lambda self, e: self.func(
                "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position")
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        }

        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def select_sql(self, expression: exp.Select) -> str:
            if expression.args.get("offset"):
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                limit = expression.args.get("limit")
                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def set_operations(self, expression: exp.Union) -> str:
            limit = expression.args.get("limit")
            if limit:
                return self.sql(expression.limit(limit.pop(), copy=False))

            return super().set_operations(expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            if type(expression.parent) in BIT_TYPES:
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                sql = f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            kind = expression.kind
            exists = expression.args.pop("exists", None)
            sql = super().create_sql(expression)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                ctas_with = ctas_expression.args.get("with")
                if ctas_with:
                    ctas_with = ctas_with.pop()

                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table))
                select_into.set("with", ctas_with)

                if like_property:
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)

            if exists:
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql = self.sql(exp.Literal.string(sql))
                if kind == "SCHEMA":
                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("table_name").eq(table.name),
                        exp.column("table_schema").eq(table.db) if table.db else None,
                        exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                    )
                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        def offset_sql(self, expression: exp.Offset) -> str:
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            identifier = super().identifier_sql(expression)

            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)

        def partition_sql(self, expression: exp.Partition) -> str:
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"
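The listing above registers this dialect under the name "tsql" (the class name lowercased). A minimal usage sketch, using only the public sqlglot API and an illustrative query chosen for this example:

import sqlglot

# Parse a T-SQL statement and re-generate it, or emit another dialect.
expression = sqlglot.parse_one("SELECT TOP 10 name FROM sys.objects", read="tsql")
print(expression.sql(dialect="tsql"))    # TOP is kept, since Generator.LIMIT_IS_TOP is True
print(expression.sql(dialect="duckdb"))  # other dialects typically render this as a LIMIT clause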
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
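A short sketch of the effect, using a hypothetical CTE whose first projection is an unaliased literal (the exact generated alias name is an implementation detail of qualify_outputs):

import sqlglot
from sqlglot import exp
from sqlglot.dialects.tsql import qualify_derived_table_outputs

cte = sqlglot.parse_one("WITH t AS (SELECT 1, x FROM y) SELECT * FROM t").find(exp.CTE)
qualify_derived_table_outputs(cte)
# The unaliased literal now carries a generated alias (e.g. 1 AS _col_0), the column
# projection is re-aliased to its own name, and identifier quoting is preserved.
print(cte.this.sql(dialect="tsql"))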
332class TSQL(Dialect): 333 NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE 334 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 335 SUPPORTS_SEMI_ANTI_JOIN = False 336 LOG_BASE_FIRST = False 337 TYPED_DIVISION = True 338 CONCAT_COALESCE = True 339 340 TIME_MAPPING = { 341 "year": "%Y", 342 "dayofyear": "%j", 343 "day": "%d", 344 "dy": "%d", 345 "y": "%Y", 346 "week": "%W", 347 "ww": "%W", 348 "wk": "%W", 349 "hour": "%h", 350 "hh": "%I", 351 "minute": "%M", 352 "mi": "%M", 353 "n": "%M", 354 "second": "%S", 355 "ss": "%S", 356 "s": "%-S", 357 "millisecond": "%f", 358 "ms": "%f", 359 "weekday": "%W", 360 "dw": "%W", 361 "month": "%m", 362 "mm": "%M", 363 "m": "%-M", 364 "Y": "%Y", 365 "YYYY": "%Y", 366 "YY": "%y", 367 "MMMM": "%B", 368 "MMM": "%b", 369 "MM": "%m", 370 "M": "%-m", 371 "dddd": "%A", 372 "dd": "%d", 373 "d": "%-d", 374 "HH": "%H", 375 "H": "%-H", 376 "h": "%-I", 377 "S": "%f", 378 "yyyy": "%Y", 379 "yy": "%y", 380 } 381 382 CONVERT_FORMAT_MAPPING = { 383 "0": "%b %d %Y %-I:%M%p", 384 "1": "%m/%d/%y", 385 "2": "%y.%m.%d", 386 "3": "%d/%m/%y", 387 "4": "%d.%m.%y", 388 "5": "%d-%m-%y", 389 "6": "%d %b %y", 390 "7": "%b %d, %y", 391 "8": "%H:%M:%S", 392 "9": "%b %d %Y %-I:%M:%S:%f%p", 393 "10": "mm-dd-yy", 394 "11": "yy/mm/dd", 395 "12": "yymmdd", 396 "13": "%d %b %Y %H:%M:ss:%f", 397 "14": "%H:%M:%S:%f", 398 "20": "%Y-%m-%d %H:%M:%S", 399 "21": "%Y-%m-%d %H:%M:%S.%f", 400 "22": "%m/%d/%y %-I:%M:%S %p", 401 "23": "%Y-%m-%d", 402 "24": "%H:%M:%S", 403 "25": "%Y-%m-%d %H:%M:%S.%f", 404 "100": "%b %d %Y %-I:%M%p", 405 "101": "%m/%d/%Y", 406 "102": "%Y.%m.%d", 407 "103": "%d/%m/%Y", 408 "104": "%d.%m.%Y", 409 "105": "%d-%m-%Y", 410 "106": "%d %b %Y", 411 "107": "%b %d, %Y", 412 "108": "%H:%M:%S", 413 "109": "%b %d %Y %-I:%M:%S:%f%p", 414 "110": "%m-%d-%Y", 415 "111": "%Y/%m/%d", 416 "112": "%Y%m%d", 417 "113": "%d %b %Y %H:%M:%S:%f", 418 "114": "%H:%M:%S:%f", 419 "120": "%Y-%m-%d %H:%M:%S", 420 "121": "%Y-%m-%d %H:%M:%S.%f", 421 } 422 423 FORMAT_TIME_MAPPING = { 424 "y": "%B %Y", 425 "d": "%m/%d/%Y", 426 "H": "%-H", 427 "h": "%-I", 428 "s": "%Y-%m-%d %H:%M:%S", 429 "D": "%A,%B,%Y", 430 "f": "%A,%B,%Y %-I:%M %p", 431 "F": "%A,%B,%Y %-I:%M:%S %p", 432 "g": "%m/%d/%Y %-I:%M %p", 433 "G": "%m/%d/%Y %-I:%M:%S %p", 434 "M": "%B %-d", 435 "m": "%B %-d", 436 "O": "%Y-%m-%dT%H:%M:%S", 437 "u": "%Y-%M-%D %H:%M:%S%z", 438 "U": "%A, %B %D, %Y %H:%M:%S%z", 439 "T": "%-I:%M:%S %p", 440 "t": "%-I:%M", 441 "Y": "%a %Y", 442 } 443 444 class Tokenizer(tokens.Tokenizer): 445 IDENTIFIERS = [("[", "]"), '"'] 446 QUOTES = ["'", '"'] 447 HEX_STRINGS = [("0x", ""), ("0X", "")] 448 VAR_SINGLE_TOKENS = {"@", "$", "#"} 449 450 KEYWORDS = { 451 **tokens.Tokenizer.KEYWORDS, 452 "DATETIME2": TokenType.DATETIME, 453 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 454 "DECLARE": TokenType.COMMAND, 455 "EXEC": TokenType.COMMAND, 456 "IMAGE": TokenType.IMAGE, 457 "MONEY": TokenType.MONEY, 458 "NTEXT": TokenType.TEXT, 459 "PRINT": TokenType.COMMAND, 460 "PROC": TokenType.PROCEDURE, 461 "REAL": TokenType.FLOAT, 462 "ROWVERSION": TokenType.ROWVERSION, 463 "SMALLDATETIME": TokenType.DATETIME, 464 "SMALLMONEY": TokenType.SMALLMONEY, 465 "SQL_VARIANT": TokenType.VARIANT, 466 "TOP": TokenType.TOP, 467 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 468 "UPDATE STATISTICS": TokenType.COMMAND, 469 "XML": TokenType.XML, 470 "OUTPUT": TokenType.RETURNING, 471 "SYSTEM_USER": TokenType.CURRENT_USER, 472 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 473 "OPTION": TokenType.OPTION, 474 } 475 476 class Parser(parser.Parser): 477 
SET_REQUIRES_ASSIGNMENT_DELIMITER = False 478 LOG_DEFAULTS_TO_LN = True 479 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 480 STRING_ALIASES = True 481 NO_PAREN_IF_COMMANDS = False 482 483 QUERY_MODIFIER_PARSERS = { 484 **parser.Parser.QUERY_MODIFIER_PARSERS, 485 TokenType.OPTION: lambda self: ("options", self._parse_options()), 486 } 487 488 FUNCTIONS = { 489 **parser.Parser.FUNCTIONS, 490 "CHARINDEX": lambda args: exp.StrPosition( 491 this=seq_get(args, 1), 492 substr=seq_get(args, 0), 493 position=seq_get(args, 2), 494 ), 495 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 496 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 497 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 498 "DATEPART": _build_formatted_time(exp.TimeToStr), 499 "DATETIMEFROMPARTS": _build_datetimefromparts, 500 "EOMONTH": _build_eomonth, 501 "FORMAT": _build_format, 502 "GETDATE": exp.CurrentTimestamp.from_arg_list, 503 "HASHBYTES": _build_hashbytes, 504 "ISNULL": exp.Coalesce.from_arg_list, 505 "JSON_QUERY": parser.build_extract_json_with_path(exp.JSONExtract), 506 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 507 "LEN": _build_with_arg_as_text(exp.Length), 508 "LEFT": _build_with_arg_as_text(exp.Left), 509 "RIGHT": _build_with_arg_as_text(exp.Right), 510 "REPLICATE": exp.Repeat.from_arg_list, 511 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 512 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 513 "SUSER_NAME": exp.CurrentUser.from_arg_list, 514 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 515 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 516 "TIMEFROMPARTS": _build_timefromparts, 517 } 518 519 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 520 521 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 522 TokenType.TABLE, 523 *parser.Parser.TYPE_TOKENS, 524 } 525 526 STATEMENT_PARSERS = { 527 **parser.Parser.STATEMENT_PARSERS, 528 TokenType.END: lambda self: self._parse_command(), 529 } 530 531 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 532 if not self._match(TokenType.OPTION): 533 return None 534 535 def _parse_option() -> t.Optional[exp.Expression]: 536 option = self._parse_var_from_options(OPTIONS) 537 if not option: 538 return None 539 540 self._match(TokenType.EQ) 541 return self.expression( 542 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 543 ) 544 545 return self._parse_wrapped_csv(_parse_option) 546 547 def _parse_projections(self) -> t.List[exp.Expression]: 548 """ 549 T-SQL supports the syntax alias = expression in the SELECT's projection list, 550 so we transform all parsed Selects to convert their EQ projections into Aliases. 
551 552 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 553 """ 554 return [ 555 ( 556 exp.alias_(projection.expression, projection.this.this, copy=False) 557 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 558 else projection 559 ) 560 for projection in super()._parse_projections() 561 ] 562 563 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 564 """Applies to SQL Server and Azure SQL Database 565 COMMIT [ { TRAN | TRANSACTION } 566 [ transaction_name | @tran_name_variable ] ] 567 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 568 569 ROLLBACK { TRAN | TRANSACTION } 570 [ transaction_name | @tran_name_variable 571 | savepoint_name | @savepoint_variable ] 572 """ 573 rollback = self._prev.token_type == TokenType.ROLLBACK 574 575 self._match_texts(("TRAN", "TRANSACTION")) 576 this = self._parse_id_var() 577 578 if rollback: 579 return self.expression(exp.Rollback, this=this) 580 581 durability = None 582 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 583 self._match_text_seq("DELAYED_DURABILITY") 584 self._match(TokenType.EQ) 585 586 if self._match_text_seq("OFF"): 587 durability = False 588 else: 589 self._match(TokenType.ON) 590 durability = True 591 592 self._match_r_paren() 593 594 return self.expression(exp.Commit, this=this, durability=durability) 595 596 def _parse_transaction(self) -> exp.Transaction | exp.Command: 597 """Applies to SQL Server and Azure SQL Database 598 BEGIN { TRAN | TRANSACTION } 599 [ { transaction_name | @tran_name_variable } 600 [ WITH MARK [ 'description' ] ] 601 ] 602 """ 603 if self._match_texts(("TRAN", "TRANSACTION")): 604 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 605 if self._match_text_seq("WITH", "MARK"): 606 transaction.set("mark", self._parse_string()) 607 608 return transaction 609 610 return self._parse_as_command(self._prev) 611 612 def _parse_returns(self) -> exp.ReturnsProperty: 613 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 614 returns = super()._parse_returns() 615 returns.set("table", table) 616 return returns 617 618 def _parse_convert( 619 self, strict: bool, safe: t.Optional[bool] = None 620 ) -> t.Optional[exp.Expression]: 621 this = self._parse_types() 622 self._match(TokenType.COMMA) 623 args = [this, *self._parse_csv(self._parse_conjunction)] 624 convert = exp.Convert.from_arg_list(args) 625 convert.set("safe", safe) 626 convert.set("strict", strict) 627 return convert 628 629 def _parse_user_defined_function( 630 self, kind: t.Optional[TokenType] = None 631 ) -> t.Optional[exp.Expression]: 632 this = super()._parse_user_defined_function(kind=kind) 633 634 if ( 635 kind == TokenType.FUNCTION 636 or isinstance(this, exp.UserDefinedFunction) 637 or self._match(TokenType.ALIAS, advance=False) 638 ): 639 return this 640 641 expressions = self._parse_csv(self._parse_function_parameter) 642 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 643 644 def _parse_id_var( 645 self, 646 any_token: bool = True, 647 tokens: t.Optional[t.Collection[TokenType]] = None, 648 ) -> t.Optional[exp.Expression]: 649 is_temporary = self._match(TokenType.HASH) 650 is_global = is_temporary and self._match(TokenType.HASH) 651 652 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 653 if this: 654 if is_global: 655 this.set("global", True) 656 elif is_temporary: 657 this.set("temporary", True) 658 659 return this 660 661 def 
_parse_create(self) -> exp.Create | exp.Command: 662 create = super()._parse_create() 663 664 if isinstance(create, exp.Create): 665 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 666 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 667 if not create.args.get("properties"): 668 create.set("properties", exp.Properties(expressions=[])) 669 670 create.args["properties"].append("expressions", exp.TemporaryProperty()) 671 672 return create 673 674 def _parse_if(self) -> t.Optional[exp.Expression]: 675 index = self._index 676 677 if self._match_text_seq("OBJECT_ID"): 678 self._parse_wrapped_csv(self._parse_string) 679 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 680 return self._parse_drop(exists=True) 681 self._retreat(index) 682 683 return super()._parse_if() 684 685 def _parse_unique(self) -> exp.UniqueColumnConstraint: 686 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 687 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 688 else: 689 this = self._parse_schema(self._parse_id_var(any_token=False)) 690 691 return self.expression(exp.UniqueColumnConstraint, this=this) 692 693 def _parse_partition(self) -> t.Optional[exp.Partition]: 694 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 695 return None 696 697 def parse_range(): 698 low = self._parse_bitwise() 699 high = self._parse_bitwise() if self._match_text_seq("TO") else None 700 701 return ( 702 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 703 ) 704 705 partition = self.expression( 706 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 707 ) 708 709 self._match_r_paren() 710 711 return partition 712 713 class Generator(generator.Generator): 714 LIMIT_IS_TOP = True 715 QUERY_HINTS = False 716 RETURNING_END = False 717 NVL2_SUPPORTED = False 718 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 719 LIMIT_FETCH = "FETCH" 720 COMPUTED_COLUMN_WITH_TYPE = False 721 CTE_RECURSIVE_KEYWORD_REQUIRED = False 722 ENSURE_BOOLS = True 723 NULL_ORDERING_SUPPORTED = None 724 SUPPORTS_SINGLE_ARG_CONCAT = False 725 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 726 SUPPORTS_SELECT_INTO = True 727 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 728 SUPPORTS_TO_NUMBER = False 729 730 EXPRESSIONS_WITHOUT_NESTED_CTES = { 731 exp.Delete, 732 exp.Insert, 733 exp.Merge, 734 exp.Select, 735 exp.Subquery, 736 exp.Union, 737 exp.Update, 738 } 739 740 SUPPORTED_JSON_PATH_PARTS = { 741 exp.JSONPathKey, 742 exp.JSONPathRoot, 743 exp.JSONPathSubscript, 744 } 745 746 TYPE_MAPPING = { 747 **generator.Generator.TYPE_MAPPING, 748 exp.DataType.Type.BOOLEAN: "BIT", 749 exp.DataType.Type.DECIMAL: "NUMERIC", 750 exp.DataType.Type.DATETIME: "DATETIME2", 751 exp.DataType.Type.DOUBLE: "FLOAT", 752 exp.DataType.Type.INT: "INTEGER", 753 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 754 exp.DataType.Type.TIMESTAMP: "DATETIME2", 755 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 756 exp.DataType.Type.VARIANT: "SQL_VARIANT", 757 } 758 759 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 760 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 761 762 TRANSFORMS = { 763 **generator.Generator.TRANSFORMS, 764 exp.AnyValue: any_value_to_max_sql, 765 exp.ArrayToString: rename_func("STRING_AGG"), 766 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 767 exp.DateAdd: date_delta_sql("DATEADD"), 768 exp.DateDiff: date_delta_sql("DATEDIFF"), 769 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 770 exp.CurrentDate: rename_func("GETDATE"), 771 
exp.CurrentTimestamp: rename_func("GETDATE"), 772 exp.Extract: rename_func("DATEPART"), 773 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 774 exp.GroupConcat: _string_agg_sql, 775 exp.If: rename_func("IIF"), 776 exp.JSONExtract: _json_extract_sql, 777 exp.JSONExtractScalar: _json_extract_sql, 778 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 779 exp.Max: max_or_greatest, 780 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 781 exp.Min: min_or_least, 782 exp.NumberToStr: _format_sql, 783 exp.ParseJSON: lambda self, e: self.sql(e, "this"), 784 exp.Select: transforms.preprocess( 785 [ 786 transforms.eliminate_distinct_on, 787 transforms.eliminate_semi_and_anti_joins, 788 transforms.eliminate_qualify, 789 ] 790 ), 791 exp.StrPosition: lambda self, e: self.func( 792 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 793 ), 794 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 795 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 796 exp.SHA2: lambda self, e: self.func( 797 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 798 ), 799 exp.TemporaryProperty: lambda self, e: "", 800 exp.TimeStrToTime: timestrtotime_sql, 801 exp.TimeToStr: _format_sql, 802 exp.Trim: trim_sql, 803 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 804 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 805 } 806 807 TRANSFORMS.pop(exp.ReturnsProperty) 808 809 PROPERTIES_LOCATION = { 810 **generator.Generator.PROPERTIES_LOCATION, 811 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 812 } 813 814 def select_sql(self, expression: exp.Select) -> str: 815 if expression.args.get("offset"): 816 if not expression.args.get("order"): 817 # ORDER BY is required in order to use OFFSET in a query, so we use 818 # a noop order by, since we don't really care about the order. 
819 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 820 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 821 822 limit = expression.args.get("limit") 823 if isinstance(limit, exp.Limit): 824 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 825 # we replace here because otherwise TOP would be generated in select_sql 826 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 827 828 return super().select_sql(expression) 829 830 def convert_sql(self, expression: exp.Convert) -> str: 831 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 832 return self.func( 833 name, expression.this, expression.expression, expression.args.get("style") 834 ) 835 836 def queryoption_sql(self, expression: exp.QueryOption) -> str: 837 option = self.sql(expression, "this") 838 value = self.sql(expression, "expression") 839 if value: 840 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 841 return f"{option} {optional_equal_sign}{value}" 842 return option 843 844 def lateral_op(self, expression: exp.Lateral) -> str: 845 cross_apply = expression.args.get("cross_apply") 846 if cross_apply is True: 847 return "CROSS APPLY" 848 if cross_apply is False: 849 return "OUTER APPLY" 850 851 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 852 self.unsupported("LATERAL clause is not supported.") 853 return "LATERAL" 854 855 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 856 nano = expression.args.get("nano") 857 if nano is not None: 858 nano.pop() 859 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 860 861 if expression.args.get("fractions") is None: 862 expression.set("fractions", exp.Literal.number(0)) 863 if expression.args.get("precision") is None: 864 expression.set("precision", exp.Literal.number(0)) 865 866 return rename_func("TIMEFROMPARTS")(self, expression) 867 868 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 869 zone = expression.args.get("zone") 870 if zone is not None: 871 zone.pop() 872 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 873 874 nano = expression.args.get("nano") 875 if nano is not None: 876 nano.pop() 877 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 878 879 if expression.args.get("milli") is None: 880 expression.set("milli", exp.Literal.number(0)) 881 882 return rename_func("DATETIMEFROMPARTS")(self, expression) 883 884 def set_operations(self, expression: exp.Union) -> str: 885 limit = expression.args.get("limit") 886 if limit: 887 return self.sql(expression.limit(limit.pop(), copy=False)) 888 889 return super().set_operations(expression) 890 891 def setitem_sql(self, expression: exp.SetItem) -> str: 892 this = expression.this 893 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 894 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 
895 return f"{self.sql(this.left)} {self.sql(this.right)}" 896 897 return super().setitem_sql(expression) 898 899 def boolean_sql(self, expression: exp.Boolean) -> str: 900 if type(expression.parent) in BIT_TYPES: 901 return "1" if expression.this else "0" 902 903 return "(1 = 1)" if expression.this else "(1 = 0)" 904 905 def is_sql(self, expression: exp.Is) -> str: 906 if isinstance(expression.expression, exp.Boolean): 907 return self.binary(expression, "=") 908 return self.binary(expression, "IS") 909 910 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 911 sql = self.sql(expression, "this") 912 properties = expression.args.get("properties") 913 914 if sql[:1] != "#" and any( 915 isinstance(prop, exp.TemporaryProperty) 916 for prop in (properties.expressions if properties else []) 917 ): 918 sql = f"#{sql}" 919 920 return sql 921 922 def create_sql(self, expression: exp.Create) -> str: 923 kind = expression.kind 924 exists = expression.args.pop("exists", None) 925 sql = super().create_sql(expression) 926 927 like_property = expression.find(exp.LikeProperty) 928 if like_property: 929 ctas_expression = like_property.this 930 else: 931 ctas_expression = expression.expression 932 933 table = expression.find(exp.Table) 934 935 # Convert CTAS statement to SELECT .. INTO .. 936 if kind == "TABLE" and ctas_expression: 937 ctas_with = ctas_expression.args.get("with") 938 if ctas_with: 939 ctas_with = ctas_with.pop() 940 941 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 942 ctas_expression = ctas_expression.subquery() 943 944 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 945 select_into.set("into", exp.Into(this=table)) 946 select_into.set("with", ctas_with) 947 948 if like_property: 949 select_into.limit(0, copy=False) 950 951 sql = self.sql(select_into) 952 953 if exists: 954 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 955 sql = self.sql(exp.Literal.string(sql)) 956 if kind == "SCHEMA": 957 sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})""" 958 elif kind == "TABLE": 959 assert table 960 where = exp.and_( 961 exp.column("table_name").eq(table.name), 962 exp.column("table_schema").eq(table.db) if table.db else None, 963 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 964 ) 965 sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})""" 966 elif kind == "INDEX": 967 index = self.sql(exp.Literal.string(expression.this.text("this"))) 968 sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})""" 969 elif expression.args.get("replace"): 970 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 971 972 return self.prepend_ctes(expression, sql) 973 974 def offset_sql(self, expression: exp.Offset) -> str: 975 return f"{super().offset_sql(expression)} ROWS" 976 977 def version_sql(self, expression: exp.Version) -> str: 978 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 979 this = f"FOR {name}" 980 expr = expression.expression 981 kind = expression.text("kind") 982 if kind in ("FROM", "BETWEEN"): 983 args = expr.expressions 984 sep = "TO" if kind == "FROM" else "AND" 985 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 986 else: 987 expr_sql = self.sql(expr) 988 989 expr_sql = f" {expr_sql}" if expr_sql else "" 990 return f"{this} 
{kind}{expr_sql}" 991 992 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 993 table = expression.args.get("table") 994 table = f"{table} " if table else "" 995 return f"RETURNS {table}{self.sql(expression, 'this')}" 996 997 def returning_sql(self, expression: exp.Returning) -> str: 998 into = self.sql(expression, "into") 999 into = self.seg(f"INTO {into}") if into else "" 1000 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1001 1002 def transaction_sql(self, expression: exp.Transaction) -> str: 1003 this = self.sql(expression, "this") 1004 this = f" {this}" if this else "" 1005 mark = self.sql(expression, "mark") 1006 mark = f" WITH MARK {mark}" if mark else "" 1007 return f"BEGIN TRANSACTION{this}{mark}" 1008 1009 def commit_sql(self, expression: exp.Commit) -> str: 1010 this = self.sql(expression, "this") 1011 this = f" {this}" if this else "" 1012 durability = expression.args.get("durability") 1013 durability = ( 1014 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1015 if durability is not None 1016 else "" 1017 ) 1018 return f"COMMIT TRANSACTION{this}{durability}" 1019 1020 def rollback_sql(self, expression: exp.Rollback) -> str: 1021 this = self.sql(expression, "this") 1022 this = f" {this}" if this else "" 1023 return f"ROLLBACK TRANSACTION{this}" 1024 1025 def identifier_sql(self, expression: exp.Identifier) -> str: 1026 identifier = super().identifier_sql(expression) 1027 1028 if expression.args.get("global"): 1029 identifier = f"##{identifier}" 1030 elif expression.args.get("temporary"): 1031 identifier = f"#{identifier}" 1032 1033 return identifier 1034 1035 def constraint_sql(self, expression: exp.Constraint) -> str: 1036 this = self.sql(expression, "this") 1037 expressions = self.expressions(expression, flat=True, sep=" ") 1038 return f"CONSTRAINT {this} {expressions}" 1039 1040 def length_sql(self, expression: exp.Length) -> str: 1041 return self._uncast_text(expression, "LEN") 1042 1043 def right_sql(self, expression: exp.Right) -> str: 1044 return self._uncast_text(expression, "RIGHT") 1045 1046 def left_sql(self, expression: exp.Left) -> str: 1047 return self._uncast_text(expression, "LEFT") 1048 1049 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1050 this = expression.this 1051 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1052 this_sql = self.sql(this, "this") 1053 else: 1054 this_sql = self.sql(this) 1055 expression_sql = self.sql(expression, "expression") 1056 return self.func(name, this_sql, expression_sql if expression_sql else None) 1057 1058 def partition_sql(self, expression: exp.Partition) -> str: 1059 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"
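The create_sql override in the listing above rewrites CTAS statements into SELECT ... INTO, since T-SQL has no CREATE TABLE AS SELECT. A minimal usage sketch (table names are illustrative and the exact output string may differ between sqlglot versions):

import sqlglot

# Transpiling a CTAS statement to T-SQL is expected to produce a SELECT ... INTO
# form, wrapping the query in a derived table aliased "temp" as in create_sql above.
print(sqlglot.transpile("CREATE TABLE t2 AS SELECT a, b FROM t1", write="tsql")[0])
# e.g. SELECT * INTO t2 FROM (SELECT a, b FROM t1) AS temp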
Specifies the strategy according to which identifiers should be normalized.
Whether the base comes first in the LOG function. Possible values: True, False, None (two arguments are not supported by LOG).
Whether the behavior of a / b depends on the types of a and b. False means a / b is always float division. True means a / b is integer division if both a and b are integers.
A NULL arg in CONCAT yields NULL by default, but in some dialects it yields an empty string.
Associates this dialect's time formats with their equivalent Python strftime formats.
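These docstrings describe dialect-level settings inherited from Dialect (in recent sqlglot versions they correspond to attributes such as NORMALIZATION_STRATEGY, LOG_BASE_FIRST, TYPED_DIVISION, CONCAT_COALESCE and TIME_MAPPING). A small sketch of inspecting them through the public API; outputs may vary by version:

import sqlglot
from sqlglot.dialects.tsql import TSQL

# TIME_MAPPING translates T-SQL date/time format tokens into Python strftime
# directives; "yyyy" is expected to map to "%Y".
print(TSQL.TIME_MAPPING.get("yyyy"))

# Round-trip a query through the T-SQL dialect; transpile() returns one SQL
# string per input statement.
print(sqlglot.transpile("SELECT CONCAT(a, b) FROM t", read="tsql", write="tsql")[0])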
Inherited Members
- sqlglot.dialects.dialect.Dialect
- Dialect
- INDEX_OFFSET
- WEEK_OFFSET
- UNNEST_COLUMN_ONLY
- ALIAS_POST_TABLESAMPLE
- TABLESAMPLE_SIZE_IS_PERCENT
- IDENTIFIERS_CAN_START_WITH_DIGIT
- DPIPE_IS_STRING_CONCAT
- STRICT_STRING_CONCAT
- SUPPORTS_USER_DEFINED_TYPES
- NORMALIZE_FUNCTIONS
- NULL_ORDERING
- SAFE_DIVISION
- DATE_FORMAT
- DATEINT_FORMAT
- FORMAT_MAPPING
- ESCAPE_SEQUENCES
- PSEUDOCOLUMNS
- PREFER_CTE_ALIAS_COLUMN
- get_or_raise
- format_time
- normalize_identifier
- case_sensitive
- can_identify
- quote_identifier
- to_json_path
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- parser
- generator
444 class Tokenizer(tokens.Tokenizer): 445 IDENTIFIERS = [("[", "]"), '"'] 446 QUOTES = ["'", '"'] 447 HEX_STRINGS = [("0x", ""), ("0X", "")] 448 VAR_SINGLE_TOKENS = {"@", "$", "#"} 449 450 KEYWORDS = { 451 **tokens.Tokenizer.KEYWORDS, 452 "DATETIME2": TokenType.DATETIME, 453 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 454 "DECLARE": TokenType.COMMAND, 455 "EXEC": TokenType.COMMAND, 456 "IMAGE": TokenType.IMAGE, 457 "MONEY": TokenType.MONEY, 458 "NTEXT": TokenType.TEXT, 459 "PRINT": TokenType.COMMAND, 460 "PROC": TokenType.PROCEDURE, 461 "REAL": TokenType.FLOAT, 462 "ROWVERSION": TokenType.ROWVERSION, 463 "SMALLDATETIME": TokenType.DATETIME, 464 "SMALLMONEY": TokenType.SMALLMONEY, 465 "SQL_VARIANT": TokenType.VARIANT, 466 "TOP": TokenType.TOP, 467 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 468 "UPDATE STATISTICS": TokenType.COMMAND, 469 "XML": TokenType.XML, 470 "OUTPUT": TokenType.RETURNING, 471 "SYSTEM_USER": TokenType.CURRENT_USER, 472 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 473 "OPTION": TokenType.OPTION, 474 }
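A minimal sketch of running this tokenizer through the dialect's tokenize helper (listed under the inherited Dialect members above); the table name is illustrative and the exact token stream depends on the installed version:

from sqlglot.dialects.tsql import TSQL

# Tokenize a T-SQL statement; TOP, for instance, has a dedicated token type in
# this dialect's KEYWORDS mapping.
for token in TSQL().tokenize("SELECT TOP 5 name FROM dbo.users"):
    print(token.token_type, repr(token.text))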
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- IDENTIFIER_ESCAPES
- STRING_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- WHITE_SPACE
- COMMANDS
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
476 class Parser(parser.Parser): 477 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 478 LOG_DEFAULTS_TO_LN = True 479 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 480 STRING_ALIASES = True 481 NO_PAREN_IF_COMMANDS = False 482 483 QUERY_MODIFIER_PARSERS = { 484 **parser.Parser.QUERY_MODIFIER_PARSERS, 485 TokenType.OPTION: lambda self: ("options", self._parse_options()), 486 } 487 488 FUNCTIONS = { 489 **parser.Parser.FUNCTIONS, 490 "CHARINDEX": lambda args: exp.StrPosition( 491 this=seq_get(args, 1), 492 substr=seq_get(args, 0), 493 position=seq_get(args, 2), 494 ), 495 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 496 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 497 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 498 "DATEPART": _build_formatted_time(exp.TimeToStr), 499 "DATETIMEFROMPARTS": _build_datetimefromparts, 500 "EOMONTH": _build_eomonth, 501 "FORMAT": _build_format, 502 "GETDATE": exp.CurrentTimestamp.from_arg_list, 503 "HASHBYTES": _build_hashbytes, 504 "ISNULL": exp.Coalesce.from_arg_list, 505 "JSON_QUERY": parser.build_extract_json_with_path(exp.JSONExtract), 506 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 507 "LEN": _build_with_arg_as_text(exp.Length), 508 "LEFT": _build_with_arg_as_text(exp.Left), 509 "RIGHT": _build_with_arg_as_text(exp.Right), 510 "REPLICATE": exp.Repeat.from_arg_list, 511 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 512 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 513 "SUSER_NAME": exp.CurrentUser.from_arg_list, 514 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 515 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 516 "TIMEFROMPARTS": _build_timefromparts, 517 } 518 519 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 520 521 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 522 TokenType.TABLE, 523 *parser.Parser.TYPE_TOKENS, 524 } 525 526 STATEMENT_PARSERS = { 527 **parser.Parser.STATEMENT_PARSERS, 528 TokenType.END: lambda self: self._parse_command(), 529 } 530 531 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 532 if not self._match(TokenType.OPTION): 533 return None 534 535 def _parse_option() -> t.Optional[exp.Expression]: 536 option = self._parse_var_from_options(OPTIONS) 537 if not option: 538 return None 539 540 self._match(TokenType.EQ) 541 return self.expression( 542 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 543 ) 544 545 return self._parse_wrapped_csv(_parse_option) 546 547 def _parse_projections(self) -> t.List[exp.Expression]: 548 """ 549 T-SQL supports the syntax alias = expression in the SELECT's projection list, 550 so we transform all parsed Selects to convert their EQ projections into Aliases. 
551 552 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 553 """ 554 return [ 555 ( 556 exp.alias_(projection.expression, projection.this.this, copy=False) 557 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 558 else projection 559 ) 560 for projection in super()._parse_projections() 561 ] 562 563 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 564 """Applies to SQL Server and Azure SQL Database 565 COMMIT [ { TRAN | TRANSACTION } 566 [ transaction_name | @tran_name_variable ] ] 567 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 568 569 ROLLBACK { TRAN | TRANSACTION } 570 [ transaction_name | @tran_name_variable 571 | savepoint_name | @savepoint_variable ] 572 """ 573 rollback = self._prev.token_type == TokenType.ROLLBACK 574 575 self._match_texts(("TRAN", "TRANSACTION")) 576 this = self._parse_id_var() 577 578 if rollback: 579 return self.expression(exp.Rollback, this=this) 580 581 durability = None 582 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 583 self._match_text_seq("DELAYED_DURABILITY") 584 self._match(TokenType.EQ) 585 586 if self._match_text_seq("OFF"): 587 durability = False 588 else: 589 self._match(TokenType.ON) 590 durability = True 591 592 self._match_r_paren() 593 594 return self.expression(exp.Commit, this=this, durability=durability) 595 596 def _parse_transaction(self) -> exp.Transaction | exp.Command: 597 """Applies to SQL Server and Azure SQL Database 598 BEGIN { TRAN | TRANSACTION } 599 [ { transaction_name | @tran_name_variable } 600 [ WITH MARK [ 'description' ] ] 601 ] 602 """ 603 if self._match_texts(("TRAN", "TRANSACTION")): 604 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 605 if self._match_text_seq("WITH", "MARK"): 606 transaction.set("mark", self._parse_string()) 607 608 return transaction 609 610 return self._parse_as_command(self._prev) 611 612 def _parse_returns(self) -> exp.ReturnsProperty: 613 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 614 returns = super()._parse_returns() 615 returns.set("table", table) 616 return returns 617 618 def _parse_convert( 619 self, strict: bool, safe: t.Optional[bool] = None 620 ) -> t.Optional[exp.Expression]: 621 this = self._parse_types() 622 self._match(TokenType.COMMA) 623 args = [this, *self._parse_csv(self._parse_conjunction)] 624 convert = exp.Convert.from_arg_list(args) 625 convert.set("safe", safe) 626 convert.set("strict", strict) 627 return convert 628 629 def _parse_user_defined_function( 630 self, kind: t.Optional[TokenType] = None 631 ) -> t.Optional[exp.Expression]: 632 this = super()._parse_user_defined_function(kind=kind) 633 634 if ( 635 kind == TokenType.FUNCTION 636 or isinstance(this, exp.UserDefinedFunction) 637 or self._match(TokenType.ALIAS, advance=False) 638 ): 639 return this 640 641 expressions = self._parse_csv(self._parse_function_parameter) 642 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 643 644 def _parse_id_var( 645 self, 646 any_token: bool = True, 647 tokens: t.Optional[t.Collection[TokenType]] = None, 648 ) -> t.Optional[exp.Expression]: 649 is_temporary = self._match(TokenType.HASH) 650 is_global = is_temporary and self._match(TokenType.HASH) 651 652 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 653 if this: 654 if is_global: 655 this.set("global", True) 656 elif is_temporary: 657 this.set("temporary", True) 658 659 return this 660 661 def 
_parse_create(self) -> exp.Create | exp.Command: 662 create = super()._parse_create() 663 664 if isinstance(create, exp.Create): 665 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 666 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 667 if not create.args.get("properties"): 668 create.set("properties", exp.Properties(expressions=[])) 669 670 create.args["properties"].append("expressions", exp.TemporaryProperty()) 671 672 return create 673 674 def _parse_if(self) -> t.Optional[exp.Expression]: 675 index = self._index 676 677 if self._match_text_seq("OBJECT_ID"): 678 self._parse_wrapped_csv(self._parse_string) 679 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 680 return self._parse_drop(exists=True) 681 self._retreat(index) 682 683 return super()._parse_if() 684 685 def _parse_unique(self) -> exp.UniqueColumnConstraint: 686 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 687 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 688 else: 689 this = self._parse_schema(self._parse_id_var(any_token=False)) 690 691 return self.expression(exp.UniqueColumnConstraint, this=this) 692 693 def _parse_partition(self) -> t.Optional[exp.Partition]: 694 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 695 return None 696 697 def parse_range(): 698 low = self._parse_bitwise() 699 high = self._parse_bitwise() if self._match_text_seq("TO") else None 700 701 return ( 702 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 703 ) 704 705 partition = self.expression( 706 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 707 ) 708 709 self._match_r_paren() 710 711 return partition
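A short sketch exercising two of the overrides above: the alias = expression projections handled by _parse_projections and the OPTION clause handled by _parse_options. Table names are illustrative and AST/output details may vary between versions:

import sqlglot
from sqlglot import exp

# "SELECT total = COUNT(*)" is T-SQL shorthand for "SELECT COUNT(*) AS total";
# the parser rewrites the EQ projection into an Alias node.
select = sqlglot.parse_one("SELECT total = COUNT(*) FROM orders", read="tsql")
print(isinstance(select.expressions[0], exp.Alias))

# Query-level OPTION hints are parsed as query modifiers and should round-trip
# through the generator's queryoption_sql.
print(sqlglot.transpile("SELECT * FROM t OPTION (MAXRECURSION 100)", read="tsql", write="tsql")[0])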
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
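These constructor options can also be set when building the parser through the dialect; a brief sketch, using the parser and tokenize helpers listed among the inherited Dialect members:

from sqlglot.dialects.tsql import TSQL
from sqlglot.errors import ErrorLevel

dialect = TSQL()
# Dialect.parser() forwards keyword options to this Parser's constructor.
parser = dialect.parser(error_level=ErrorLevel.RAISE)
tree = parser.parse(dialect.tokenize("SELECT name FROM sys.tables"))[0]
print(repr(tree))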
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ID_VAR_TOKENS
- INTERVAL_VARS
- ALIAS_TOKENS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- RANGE_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- NO_PAREN_FUNCTION_PARSERS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- USABLES
- CAST_ACTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_UNION
- UNION_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
713 class Generator(generator.Generator): 714 LIMIT_IS_TOP = True 715 QUERY_HINTS = False 716 RETURNING_END = False 717 NVL2_SUPPORTED = False 718 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 719 LIMIT_FETCH = "FETCH" 720 COMPUTED_COLUMN_WITH_TYPE = False 721 CTE_RECURSIVE_KEYWORD_REQUIRED = False 722 ENSURE_BOOLS = True 723 NULL_ORDERING_SUPPORTED = None 724 SUPPORTS_SINGLE_ARG_CONCAT = False 725 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 726 SUPPORTS_SELECT_INTO = True 727 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 728 SUPPORTS_TO_NUMBER = False 729 730 EXPRESSIONS_WITHOUT_NESTED_CTES = { 731 exp.Delete, 732 exp.Insert, 733 exp.Merge, 734 exp.Select, 735 exp.Subquery, 736 exp.Union, 737 exp.Update, 738 } 739 740 SUPPORTED_JSON_PATH_PARTS = { 741 exp.JSONPathKey, 742 exp.JSONPathRoot, 743 exp.JSONPathSubscript, 744 } 745 746 TYPE_MAPPING = { 747 **generator.Generator.TYPE_MAPPING, 748 exp.DataType.Type.BOOLEAN: "BIT", 749 exp.DataType.Type.DECIMAL: "NUMERIC", 750 exp.DataType.Type.DATETIME: "DATETIME2", 751 exp.DataType.Type.DOUBLE: "FLOAT", 752 exp.DataType.Type.INT: "INTEGER", 753 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 754 exp.DataType.Type.TIMESTAMP: "DATETIME2", 755 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 756 exp.DataType.Type.VARIANT: "SQL_VARIANT", 757 } 758 759 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 760 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 761 762 TRANSFORMS = { 763 **generator.Generator.TRANSFORMS, 764 exp.AnyValue: any_value_to_max_sql, 765 exp.ArrayToString: rename_func("STRING_AGG"), 766 exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 767 exp.DateAdd: date_delta_sql("DATEADD"), 768 exp.DateDiff: date_delta_sql("DATEDIFF"), 769 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 770 exp.CurrentDate: rename_func("GETDATE"), 771 exp.CurrentTimestamp: rename_func("GETDATE"), 772 exp.Extract: rename_func("DATEPART"), 773 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 774 exp.GroupConcat: _string_agg_sql, 775 exp.If: rename_func("IIF"), 776 exp.JSONExtract: _json_extract_sql, 777 exp.JSONExtractScalar: _json_extract_sql, 778 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 779 exp.Max: max_or_greatest, 780 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 781 exp.Min: min_or_least, 782 exp.NumberToStr: _format_sql, 783 exp.ParseJSON: lambda self, e: self.sql(e, "this"), 784 exp.Select: transforms.preprocess( 785 [ 786 transforms.eliminate_distinct_on, 787 transforms.eliminate_semi_and_anti_joins, 788 transforms.eliminate_qualify, 789 ] 790 ), 791 exp.StrPosition: lambda self, e: self.func( 792 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 793 ), 794 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 795 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 796 exp.SHA2: lambda self, e: self.func( 797 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 798 ), 799 exp.TemporaryProperty: lambda self, e: "", 800 exp.TimeStrToTime: timestrtotime_sql, 801 exp.TimeToStr: _format_sql, 802 exp.Trim: trim_sql, 803 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 804 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 805 } 806 807 TRANSFORMS.pop(exp.ReturnsProperty) 808 809 PROPERTIES_LOCATION = { 810 **generator.Generator.PROPERTIES_LOCATION, 811 exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED, 812 } 813 814 def select_sql(self, expression: exp.Select) -> str: 815 if 
expression.args.get("offset"): 816 if not expression.args.get("order"): 817 # ORDER BY is required in order to use OFFSET in a query, so we use 818 # a noop order by, since we don't really care about the order. 819 # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819 820 expression.order_by(exp.select(exp.null()).subquery(), copy=False) 821 822 limit = expression.args.get("limit") 823 if isinstance(limit, exp.Limit): 824 # TOP and OFFSET can't be combined, we need use FETCH instead of TOP 825 # we replace here because otherwise TOP would be generated in select_sql 826 limit.replace(exp.Fetch(direction="FIRST", count=limit.expression)) 827 828 return super().select_sql(expression) 829 830 def convert_sql(self, expression: exp.Convert) -> str: 831 name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT" 832 return self.func( 833 name, expression.this, expression.expression, expression.args.get("style") 834 ) 835 836 def queryoption_sql(self, expression: exp.QueryOption) -> str: 837 option = self.sql(expression, "this") 838 value = self.sql(expression, "expression") 839 if value: 840 optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else "" 841 return f"{option} {optional_equal_sign}{value}" 842 return option 843 844 def lateral_op(self, expression: exp.Lateral) -> str: 845 cross_apply = expression.args.get("cross_apply") 846 if cross_apply is True: 847 return "CROSS APPLY" 848 if cross_apply is False: 849 return "OUTER APPLY" 850 851 # TODO: perhaps we can check if the parent is a Join and transpile it appropriately 852 self.unsupported("LATERAL clause is not supported.") 853 return "LATERAL" 854 855 def timefromparts_sql(self, expression: exp.TimeFromParts) -> str: 856 nano = expression.args.get("nano") 857 if nano is not None: 858 nano.pop() 859 self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.") 860 861 if expression.args.get("fractions") is None: 862 expression.set("fractions", exp.Literal.number(0)) 863 if expression.args.get("precision") is None: 864 expression.set("precision", exp.Literal.number(0)) 865 866 return rename_func("TIMEFROMPARTS")(self, expression) 867 868 def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str: 869 zone = expression.args.get("zone") 870 if zone is not None: 871 zone.pop() 872 self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.") 873 874 nano = expression.args.get("nano") 875 if nano is not None: 876 nano.pop() 877 self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.") 878 879 if expression.args.get("milli") is None: 880 expression.set("milli", exp.Literal.number(0)) 881 882 return rename_func("DATETIMEFROMPARTS")(self, expression) 883 884 def set_operations(self, expression: exp.Union) -> str: 885 limit = expression.args.get("limit") 886 if limit: 887 return self.sql(expression.limit(limit.pop(), copy=False)) 888 889 return super().set_operations(expression) 890 891 def setitem_sql(self, expression: exp.SetItem) -> str: 892 this = expression.this 893 if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter): 894 # T-SQL does not use '=' in SET command, except when the LHS is a variable. 
895 return f"{self.sql(this.left)} {self.sql(this.right)}" 896 897 return super().setitem_sql(expression) 898 899 def boolean_sql(self, expression: exp.Boolean) -> str: 900 if type(expression.parent) in BIT_TYPES: 901 return "1" if expression.this else "0" 902 903 return "(1 = 1)" if expression.this else "(1 = 0)" 904 905 def is_sql(self, expression: exp.Is) -> str: 906 if isinstance(expression.expression, exp.Boolean): 907 return self.binary(expression, "=") 908 return self.binary(expression, "IS") 909 910 def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str: 911 sql = self.sql(expression, "this") 912 properties = expression.args.get("properties") 913 914 if sql[:1] != "#" and any( 915 isinstance(prop, exp.TemporaryProperty) 916 for prop in (properties.expressions if properties else []) 917 ): 918 sql = f"#{sql}" 919 920 return sql 921 922 def create_sql(self, expression: exp.Create) -> str: 923 kind = expression.kind 924 exists = expression.args.pop("exists", None) 925 sql = super().create_sql(expression) 926 927 like_property = expression.find(exp.LikeProperty) 928 if like_property: 929 ctas_expression = like_property.this 930 else: 931 ctas_expression = expression.expression 932 933 table = expression.find(exp.Table) 934 935 # Convert CTAS statement to SELECT .. INTO .. 936 if kind == "TABLE" and ctas_expression: 937 ctas_with = ctas_expression.args.get("with") 938 if ctas_with: 939 ctas_with = ctas_with.pop() 940 941 if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES): 942 ctas_expression = ctas_expression.subquery() 943 944 select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True)) 945 select_into.set("into", exp.Into(this=table)) 946 select_into.set("with", ctas_with) 947 948 if like_property: 949 select_into.limit(0, copy=False) 950 951 sql = self.sql(select_into) 952 953 if exists: 954 identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else "")) 955 sql = self.sql(exp.Literal.string(sql)) 956 if kind == "SCHEMA": 957 sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})""" 958 elif kind == "TABLE": 959 assert table 960 where = exp.and_( 961 exp.column("table_name").eq(table.name), 962 exp.column("table_schema").eq(table.db) if table.db else None, 963 exp.column("table_catalog").eq(table.catalog) if table.catalog else None, 964 ) 965 sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})""" 966 elif kind == "INDEX": 967 index = self.sql(exp.Literal.string(expression.this.text("this"))) 968 sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})""" 969 elif expression.args.get("replace"): 970 sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1) 971 972 return self.prepend_ctes(expression, sql) 973 974 def offset_sql(self, expression: exp.Offset) -> str: 975 return f"{super().offset_sql(expression)} ROWS" 976 977 def version_sql(self, expression: exp.Version) -> str: 978 name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name 979 this = f"FOR {name}" 980 expr = expression.expression 981 kind = expression.text("kind") 982 if kind in ("FROM", "BETWEEN"): 983 args = expr.expressions 984 sep = "TO" if kind == "FROM" else "AND" 985 expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}" 986 else: 987 expr_sql = self.sql(expr) 988 989 expr_sql = f" {expr_sql}" if expr_sql else "" 990 return f"{this} 
{kind}{expr_sql}" 991 992 def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str: 993 table = expression.args.get("table") 994 table = f"{table} " if table else "" 995 return f"RETURNS {table}{self.sql(expression, 'this')}" 996 997 def returning_sql(self, expression: exp.Returning) -> str: 998 into = self.sql(expression, "into") 999 into = self.seg(f"INTO {into}") if into else "" 1000 return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}" 1001 1002 def transaction_sql(self, expression: exp.Transaction) -> str: 1003 this = self.sql(expression, "this") 1004 this = f" {this}" if this else "" 1005 mark = self.sql(expression, "mark") 1006 mark = f" WITH MARK {mark}" if mark else "" 1007 return f"BEGIN TRANSACTION{this}{mark}" 1008 1009 def commit_sql(self, expression: exp.Commit) -> str: 1010 this = self.sql(expression, "this") 1011 this = f" {this}" if this else "" 1012 durability = expression.args.get("durability") 1013 durability = ( 1014 f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})" 1015 if durability is not None 1016 else "" 1017 ) 1018 return f"COMMIT TRANSACTION{this}{durability}" 1019 1020 def rollback_sql(self, expression: exp.Rollback) -> str: 1021 this = self.sql(expression, "this") 1022 this = f" {this}" if this else "" 1023 return f"ROLLBACK TRANSACTION{this}" 1024 1025 def identifier_sql(self, expression: exp.Identifier) -> str: 1026 identifier = super().identifier_sql(expression) 1027 1028 if expression.args.get("global"): 1029 identifier = f"##{identifier}" 1030 elif expression.args.get("temporary"): 1031 identifier = f"#{identifier}" 1032 1033 return identifier 1034 1035 def constraint_sql(self, expression: exp.Constraint) -> str: 1036 this = self.sql(expression, "this") 1037 expressions = self.expressions(expression, flat=True, sep=" ") 1038 return f"CONSTRAINT {this} {expressions}" 1039 1040 def length_sql(self, expression: exp.Length) -> str: 1041 return self._uncast_text(expression, "LEN") 1042 1043 def right_sql(self, expression: exp.Right) -> str: 1044 return self._uncast_text(expression, "RIGHT") 1045 1046 def left_sql(self, expression: exp.Left) -> str: 1047 return self._uncast_text(expression, "LEFT") 1048 1049 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1050 this = expression.this 1051 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1052 this_sql = self.sql(this, "this") 1053 else: 1054 this_sql = self.sql(this) 1055 expression_sql = self.sql(expression, "expression") 1056 return self.func(name, this_sql, expression_sql if expression_sql else None) 1057 1058 def partition_sql(self, expression: exp.Partition) -> str: 1059 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"
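A rough sketch of a few of these generator settings in action; the exact output strings may differ between sqlglot versions:

import sqlglot

# LIMIT_IS_TOP: a LIMIT coming from another dialect is rendered as TOP.
print(sqlglot.transpile("SELECT x FROM t LIMIT 5", read="duckdb", write="tsql")[0])
# e.g. SELECT TOP 5 x FROM t

# boolean_sql / ENSURE_BOOLS: bare boolean literals become comparisons.
print(sqlglot.transpile("SELECT * FROM t WHERE TRUE", read="duckdb", write="tsql")[0])
# e.g. SELECT * FROM t WHERE (1 = 1)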
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. Default: 2.
- indent: The indentation size in a formatted string. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether to preserve comments in the output SQL code. Default: True
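Generation options such as pretty and identify are forwarded to this Generator by the top-level API; a brief sketch (T-SQL quotes identifiers with square brackets):

import sqlglot

# Pretty-print and quote identifiers using the dialect's bracket quoting.
sql = "SELECT a, b FROM t WHERE a > 1 ORDER BY b"
print(sqlglot.transpile(sql, read="tsql", write="tsql", pretty=True, identify=True)[0])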
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- EXPLICIT_UNION
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- COLUMN_JOIN_MARKS_SUPPORTED
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- STAR_MAPPING
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- transformcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- currenttimestamp_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- renametable_sql
- renamecolumn_sql
- altertable_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- dpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- log_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- operator_sql
- toarray_sql
- tsordstotime_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- arrayany_sql
- generateseries_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql