sqlglot.dialects.bigquery
from __future__ import annotations

import logging
import re
import typing as t

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot._typing import E
from sqlglot.dialects.dialect import (
    Dialect,
    arg_max_or_min_no_count,
    binary_from_function,
    date_add_interval_sql,
    datestrtodate_sql,
    format_time_lambda,
    if_sql,
    inline_array_sql,
    json_keyvalue_comma_sql,
    max_or_greatest,
    min_or_least,
    no_ilike_sql,
    parse_date_delta_with_interval,
    regexp_replace_sql,
    rename_func,
    timestrtotime_sql,
    ts_or_ds_to_date_sql,
)
from sqlglot.helper import seq_get, split_num_words
from sqlglot.tokens import TokenType

logger = logging.getLogger("sqlglot")


def _derived_table_values_to_unnest(self: BigQuery.Generator, expression: exp.Values) -> str:
    if not expression.find_ancestor(exp.From, exp.Join):
        return self.values_sql(expression)

    alias = expression.args.get("alias")

    structs = [
        exp.Struct(
            expressions=[
                exp.alias_(value, column_name)
                for value, column_name in zip(
                    t.expressions,
                    alias.columns
                    if alias and alias.columns
                    else (f"_c{i}" for i in range(len(t.expressions))),
                )
            ]
        )
        for t in expression.find_all(exp.Tuple)
    ]

    return self.unnest_sql(exp.Unnest(expressions=[exp.Array(expressions=structs)]))


def _returnsproperty_sql(self: BigQuery.Generator, expression: exp.ReturnsProperty) -> str:
    this = expression.this
    if isinstance(this, exp.Schema):
        this = f"{this.this} <{self.expressions(this)}>"
    else:
        this = self.sql(this)
    return f"RETURNS {this}"


def _create_sql(self: BigQuery.Generator, expression: exp.Create) -> str:
    kind = expression.args["kind"]
    returns = expression.find(exp.ReturnsProperty)

    if kind.upper() == "FUNCTION" and returns and returns.args.get("is_table"):
        expression.set("kind", "TABLE FUNCTION")

        if isinstance(expression.expression, (exp.Subquery, exp.Literal)):
            expression.set("expression", expression.expression.this)

        return self.create_sql(expression)

    return self.create_sql(expression)


def _unqualify_unnest(expression: exp.Expression) -> exp.Expression:
    """Remove references to unnest table aliases since bigquery doesn't allow them.

    These are added by the optimizer's qualify_column step.
    """
    from sqlglot.optimizer.scope import find_all_in_scope

    if isinstance(expression, exp.Select):
        unnest_aliases = {
            unnest.alias
            for unnest in find_all_in_scope(expression, exp.Unnest)
            if isinstance(unnest.parent, (exp.From, exp.Join))
        }
        if unnest_aliases:
            for column in expression.find_all(exp.Column):
                if column.table in unnest_aliases:
                    column.set("table", None)
                elif column.db in unnest_aliases:
                    column.set("db", None)

    return expression


# https://issuetracker.google.com/issues/162294746
# workaround for bigquery bug when grouping by an expression and then ordering
# WITH x AS (SELECT 1 y)
# SELECT y + 1 z
# FROM x
# GROUP BY x + 1
# ORDER by z
def _alias_ordered_group(expression: exp.Expression) -> exp.Expression:
    if isinstance(expression, exp.Select):
        group = expression.args.get("group")
        order = expression.args.get("order")

        if group and order:
            aliases = {
                select.this: select.args["alias"]
                for select in expression.selects
                if isinstance(select, exp.Alias)
            }

            for e in group.expressions:
                alias = aliases.get(e)

                if alias:
                    e.replace(exp.column(alias))

    return expression


def _pushdown_cte_column_names(expression: exp.Expression) -> exp.Expression:
    """BigQuery doesn't allow column names when defining a CTE, so we try to push them down."""
    if isinstance(expression, exp.CTE) and expression.alias_column_names:
        cte_query = expression.this

        if cte_query.is_star:
            logger.warning(
                "Can't push down CTE column names for star queries. Run the query through"
                " the optimizer or use 'qualify' to expand the star projections first."
            )
            return expression

        column_names = expression.alias_column_names
        expression.args["alias"].set("columns", None)

        for name, select in zip(column_names, cte_query.selects):
            to_replace = select

            if isinstance(select, exp.Alias):
                select = select.this

            # Inner aliases are shadowed by the CTE column names
            to_replace.replace(exp.alias_(select, name))

    return expression


def _parse_timestamp(args: t.List) -> exp.StrToTime:
    this = format_time_lambda(exp.StrToTime, "bigquery")([seq_get(args, 1), seq_get(args, 0)])
    this.set("zone", seq_get(args, 2))
    return this


def _parse_date(args: t.List) -> exp.Date | exp.DateFromParts:
    expr_type = exp.DateFromParts if len(args) == 3 else exp.Date
    return expr_type.from_arg_list(args)


def _parse_to_hex(args: t.List) -> exp.Hex | exp.MD5:
    # TO_HEX(MD5(..)) is common in BigQuery, so it's parsed into MD5 to simplify its transpilation
    arg = seq_get(args, 0)
    return exp.MD5(this=arg.this) if isinstance(arg, exp.MD5Digest) else exp.Hex(this=arg)


class BigQuery(Dialect):
    UNNEST_COLUMN_ONLY = True
    SUPPORTS_USER_DEFINED_TYPES = False
    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False

    # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity
    RESOLVES_IDENTIFIERS_AS_UPPERCASE = None

    # bigquery udfs are case sensitive
    NORMALIZE_FUNCTIONS = False

    TIME_MAPPING = {
        "%D": "%m/%d/%y",
    }

    ESCAPE_SEQUENCES = {
        "\\a": "\a",
        "\\b": "\b",
        "\\f": "\f",
        "\\n": "\n",
        "\\r": "\r",
        "\\t": "\t",
        "\\v": "\v",
    }

    FORMAT_MAPPING = {
        "DD": "%d",
        "MM": "%m",
        "MON": "%b",
        "MONTH": "%B",
        "YYYY": "%Y",
        "YY": "%y",
        "HH": "%I",
        "HH12": "%I",
        "HH24": "%H",
        "MI": "%M",
        "SS": "%S",
        "SSSSS": "%f",
        "TZH": "%z",
    }

    # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement
    # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table
    PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE"}

    @classmethod
    def normalize_identifier(cls, expression: E) -> E:
        if isinstance(expression, exp.Identifier):
            parent = expression.parent
            while isinstance(parent, exp.Dot):
                parent = parent.parent

            # In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least).
            # The following check is essentially a heuristic to detect tables based on whether or
            # not they're qualified. It also avoids normalizing UDFs, because they're case-sensitive.
            if (
                not isinstance(parent, exp.UserDefinedFunction)
                and not (isinstance(parent, exp.Table) and parent.db)
                and not expression.meta.get("is_table")
            ):
                expression.set("this", expression.this.lower())

        return expression

    class Tokenizer(tokens.Tokenizer):
        QUOTES = ["'", '"', '"""', "'''"]
        COMMENTS = ["--", "#", ("/*", "*/")]
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["\\"]

        HEX_STRINGS = [("0x", ""), ("0X", "")]

        BYTE_STRINGS = [
            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
        ]

        RAW_STRINGS = [
            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
        ]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "ANY TYPE": TokenType.VARIANT,
            "BEGIN": TokenType.COMMAND,
            "BEGIN TRANSACTION": TokenType.BEGIN,
            "BYTES": TokenType.BINARY,
            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
            "DECLARE": TokenType.COMMAND,
            "FLOAT64": TokenType.DOUBLE,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "INT64": TokenType.BIGINT,
            "MODEL": TokenType.MODEL,
            "NOT DETERMINISTIC": TokenType.VOLATILE,
            "RECORD": TokenType.STRUCT,
            "TIMESTAMP": TokenType.TIMESTAMPTZ,
        }
        KEYWORDS.pop("DIV")

    class Parser(parser.Parser):
        PREFIXED_PIVOT_COLUMNS = True

        LOG_DEFAULTS_TO_LN = True

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            "DATE": _parse_date,
            "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
            "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
            "DATE_TRUNC": lambda args: exp.DateTrunc(
                unit=exp.Literal.string(str(seq_get(args, 1))),
                this=seq_get(args, 0),
            ),
            "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
            "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
            "DIV": binary_from_function(exp.IntDiv),
            "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
            "MD5": exp.MD5Digest.from_arg_list,
            "TO_HEX": _parse_to_hex,
            "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")(
                [seq_get(args, 1), seq_get(args, 0)]
            ),
            "PARSE_TIMESTAMP": _parse_timestamp,
            "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
            "REGEXP_EXTRACT": lambda args: exp.RegexpExtract(
                this=seq_get(args, 0),
                expression=seq_get(args, 1),
                position=seq_get(args, 2),
                occurrence=seq_get(args, 3),
                group=exp.Literal.number(1) if re.compile(args[1].name).groups == 1 else None,
            ),
            "SHA256": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(256)),
            "SHA512": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(512)),
            "SPLIT": lambda args: exp.Split(
                # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
                this=seq_get(args, 0),
                expression=seq_get(args, 1) or exp.Literal.string(","),
            ),
            "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
            "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
            "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
            "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
            "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
        }

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,
            "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
        }
        FUNCTION_PARSERS.pop("TRIM")

        NO_PAREN_FUNCTIONS = {
            **parser.Parser.NO_PAREN_FUNCTIONS,
            TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
        }

        NESTED_TYPE_TOKENS = {
            *parser.Parser.NESTED_TYPE_TOKENS,
            TokenType.TABLE,
        }

        ID_VAR_TOKENS = {
            *parser.Parser.ID_VAR_TOKENS,
            TokenType.VALUES,
        }

        PROPERTY_PARSERS = {
            **parser.Parser.PROPERTY_PARSERS,
            "NOT DETERMINISTIC": lambda self: self.expression(
                exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
            ),
            "OPTIONS": lambda self: self._parse_with_property(),
        }

        CONSTRAINT_PARSERS = {
            **parser.Parser.CONSTRAINT_PARSERS,
            "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
        }

        RANGE_PARSERS = parser.Parser.RANGE_PARSERS.copy()
        RANGE_PARSERS.pop(TokenType.OVERLAPS, None)

        NULL_TOKENS = {TokenType.NULL, TokenType.UNKNOWN}

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.END: lambda self: self._parse_as_command(self._prev),
            TokenType.FOR: lambda self: self._parse_for_in(),
        }

        def _parse_for_in(self) -> exp.ForIn:
            this = self._parse_range()
            self._match_text_seq("DO")
            return self.expression(exp.ForIn, this=this, expression=self._parse_statement())

        def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
            this = super()._parse_table_part(schema=schema) or self._parse_number()

            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
            if isinstance(this, exp.Identifier):
                table_name = this.name
                while self._match(TokenType.DASH, advance=False) and self._next:
                    self._advance(2)
                    table_name += f"-{self._prev.text}"

                this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))
            elif isinstance(this, exp.Literal):
                table_name = this.name

                if (
                    self._curr
                    and self._prev.end == self._curr.start - 1
                    and self._parse_var(any_token=True)
                ):
                    table_name += self._prev.text

                this = exp.Identifier(this=table_name, quoted=True)

            return this

        def _parse_table_parts(self, schema: bool = False) -> exp.Table:
            table = super()._parse_table_parts(schema=schema)
            if isinstance(table.this, exp.Identifier) and "." in table.name:
                catalog, db, this, *rest = (
                    t.cast(t.Optional[exp.Expression], exp.to_identifier(x))
                    for x in split_num_words(table.name, ".", 3)
                )

                if rest and this:
                    this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))

                table = exp.Table(this=this, db=db, catalog=catalog)

            return table

        def _parse_json_object(self) -> exp.JSONObject:
            json_object = super()._parse_json_object()
            array_kv_pair = seq_get(json_object.expressions, 0)

            # Converts BQ's "signature 2" of JSON_OBJECT into SQLGlot's canonical representation
            # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2
            if (
                array_kv_pair
                and isinstance(array_kv_pair.this, exp.Array)
                and isinstance(array_kv_pair.expression, exp.Array)
            ):
                keys = array_kv_pair.this.expressions
                values = array_kv_pair.expression.expressions

                json_object.set(
                    "expressions",
                    [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)],
                )

            return json_object

    class Generator(generator.Generator):
        EXPLICIT_UNION = True
        INTERVAL_ALLOWS_PLURAL_FORM = False
        JOIN_HINTS = False
        QUERY_HINTS = False
        TABLE_HINTS = False
        LIMIT_FETCH = "LIMIT"
        RENAME_TABLE_WITH_DB = False
        NVL2_SUPPORTED = False
        UNNEST_WITH_ORDINALITY = False
        COLLATE_IS_FUNC = True

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
            exp.ArgMax: arg_max_or_min_no_count("MAX_BY"),
            exp.ArgMin: arg_max_or_min_no_count("MIN_BY"),
            exp.ArraySize: rename_func("ARRAY_LENGTH"),
            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
            exp.CollateProperty: lambda self, e: f"DEFAULT COLLATE {self.sql(e, 'this')}"
            if e.args.get("default")
            else f"COLLATE {self.sql(e, 'this')}",
            exp.Create: _create_sql,
            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
            exp.DateAdd: date_add_interval_sql("DATE", "ADD"),
            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
            exp.DateFromParts: rename_func("DATE"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.DateSub: date_add_interval_sql("DATE", "SUB"),
            exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
            exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
            exp.GroupConcat: rename_func("STRING_AGG"),
            exp.Hex: rename_func("TO_HEX"),
            exp.If: if_sql(false_value="NULL"),
            exp.ILike: no_ilike_sql,
            exp.IntDiv: rename_func("DIV"),
            exp.JSONFormat: rename_func("TO_JSON_STRING"),
            exp.JSONKeyValue: json_keyvalue_comma_sql,
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
            exp.MD5Digest: rename_func("MD5"),
            exp.Min: min_or_least,
            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
            exp.RegexpExtract: lambda self, e: self.func(
                "REGEXP_EXTRACT",
                e.this,
                e.expression,
                e.args.get("position"),
                e.args.get("occurrence"),
            ),
            exp.RegexpReplace: regexp_replace_sql,
            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
            exp.ReturnsProperty: _returnsproperty_sql,
            exp.Select: transforms.preprocess(
                [
                    transforms.explode_to_unnest(),
                    _unqualify_unnest,
                    transforms.eliminate_distinct_on,
                    _alias_ordered_group,
                    transforms.eliminate_semi_and_anti_joins,
                ]
            ),
            exp.SHA2: lambda self, e: self.func(
                f"SHA256" if e.text("length") == "256" else "SHA512", e.this
            ),
            exp.StabilityProperty: lambda self, e: f"DETERMINISTIC"
            if e.name == "IMMUTABLE"
            else "NOT DETERMINISTIC",
            exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
            exp.StrToTime: lambda self, e: self.func(
                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
            ),
            exp.TimeAdd: date_add_interval_sql("TIME", "ADD"),
            exp.TimeSub: date_add_interval_sql("TIME", "SUB"),
            exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"),
            exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"),
            exp.TimeStrToTime: timestrtotime_sql,
            exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
            exp.TsOrDsAdd: date_add_interval_sql("DATE", "ADD"),
            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
            exp.Unhex: rename_func("FROM_HEX"),
            exp.Values: _derived_table_values_to_unnest,
            exp.VariancePop: rename_func("VAR_POP"),
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
            exp.DataType.Type.BIGINT: "INT64",
            exp.DataType.Type.BINARY: "BYTES",
            exp.DataType.Type.BOOLEAN: "BOOL",
            exp.DataType.Type.CHAR: "STRING",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DOUBLE: "FLOAT64",
            exp.DataType.Type.FLOAT: "FLOAT64",
            exp.DataType.Type.INT: "INT64",
            exp.DataType.Type.NCHAR: "STRING",
            exp.DataType.Type.NVARCHAR: "STRING",
            exp.DataType.Type.SMALLINT: "INT64",
            exp.DataType.Type.TEXT: "STRING",
            exp.DataType.Type.TIMESTAMP: "DATETIME",
            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
            exp.DataType.Type.TINYINT: "INT64",
            exp.DataType.Type.VARBINARY: "BYTES",
            exp.DataType.Type.VARCHAR: "STRING",
            exp.DataType.Type.VARIANT: "ANY TYPE",
        }

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
        RESERVED_KEYWORDS = {
            *generator.Generator.RESERVED_KEYWORDS,
            "all",
            "and",
            "any",
            "array",
            "as",
            "asc",
            "assert_rows_modified",
            "at",
            "between",
            "by",
            "case",
            "cast",
            "collate",
            "contains",
            "create",
            "cross",
            "cube",
            "current",
            "default",
            "define",
            "desc",
            "distinct",
            "else",
            "end",
            "enum",
            "escape",
            "except",
            "exclude",
            "exists",
            "extract",
            "false",
            "fetch",
            "following",
            "for",
            "from",
            "full",
            "group",
            "grouping",
            "groups",
            "hash",
            "having",
            "if",
            "ignore",
            "in",
            "inner",
            "intersect",
            "interval",
            "into",
            "is",
            "join",
            "lateral",
            "left",
            "like",
            "limit",
            "lookup",
            "merge",
            "natural",
            "new",
            "no",
            "not",
            "null",
            "nulls",
            "of",
            "on",
            "or",
            "order",
            "outer",
            "over",
            "partition",
            "preceding",
            "proto",
            "qualify",
            "range",
            "recursive",
            "respect",
            "right",
            "rollup",
            "rows",
            "select",
            "set",
            "some",
            "struct",
            "tablesample",
            "then",
            "to",
            "treat",
            "true",
            "unbounded",
            "union",
            "unnest",
            "using",
            "when",
            "where",
            "window",
            "with",
            "within",
        }

        def eq_sql(self, expression: exp.EQ) -> str:
            # Operands of = cannot be NULL in BigQuery
            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
                return "NULL"

            return self.binary(expression, "=")

        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
            parent = expression.parent

            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
                return self.func(
                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
                )

            return super().attimezone_sql(expression)

        def trycast_sql(self, expression: exp.TryCast) -> str:
            return self.cast_sql(expression, safe_prefix="SAFE_")

        def cte_sql(self, expression: exp.CTE) -> str:
            if expression.alias_column_names:
                self.unsupported("Column names in CTE definition are not supported.")
            return super().cte_sql(expression)

        def array_sql(self, expression: exp.Array) -> str:
            first_arg = seq_get(expression.expressions, 0)
            if isinstance(first_arg, exp.Subqueryable):
                return f"ARRAY{self.wrap(self.sql(first_arg))}"

            return inline_array_sql(self, expression)

        def transaction_sql(self, *_) -> str:
            return "BEGIN TRANSACTION"

        def commit_sql(self, *_) -> str:
            return "COMMIT TRANSACTION"

        def rollback_sql(self, *_) -> str:
            return "ROLLBACK TRANSACTION"

        def in_unnest_op(self, expression: exp.Unnest) -> str:
            return self.sql(expression)

        def except_op(self, expression: exp.Except) -> str:
            if not expression.args.get("distinct", False):
                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"

        def intersect_op(self, expression: exp.Intersect) -> str:
            if not expression.args.get("distinct", False):
                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"

        def with_properties(self, properties: exp.Properties) -> str:
            return self.properties(properties, prefix=self.seg("OPTIONS"))

        def version_sql(self, expression: exp.Version) -> str:
            if expression.name == "TIMESTAMP":
                expression.set("this", "SYSTEM_TIME")
            return super().version_sql(expression)
logger =
<Logger sqlglot (WARNING)>
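For orientation, here is a minimal usage sketch that is not part of the module itself: it exercises the dialect through sqlglot's top-level transpile API. The output strings in the comments are indicative and may differ slightly between sqlglot versions.

import sqlglot

# Generic SQL -> BigQuery: VARCHAR maps to STRING (see Generator.TYPE_MAPPING)
# and MD5 is wrapped in TO_HEX (see exp.MD5 in Generator.TRANSFORMS).
print(sqlglot.transpile("SELECT CAST(x AS VARCHAR(10)), MD5(x) FROM t", write="bigquery")[0])
# roughly: SELECT CAST(x AS STRING), TO_HEX(MD5(x)) FROM t

# BigQuery -> DuckDB: TO_HEX(MD5(..)) is folded back into a plain MD5 digest
# by _parse_to_hex before generation.
print(sqlglot.transpile("SELECT TO_HEX(MD5(x)) FROM t", read="bigquery", write="duckdb")[0])
# roughly: SELECT MD5(x) FROM t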
class BigQuery(sqlglot.dialects.dialect.Dialect):
ESCAPE_SEQUENCES: Dict[str, str] =
{'\\a': '\x07', '\\b': '\x08', '\\f': '\x0c', '\\n': '\n', '\\r': '\r', '\\t': '\t', '\\v': '\x0b'}
FORMAT_MAPPING: Dict[str, str] =
{'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
@classmethod
def normalize_identifier(cls, expression: ~E) -> ~E:
Normalizes an unquoted identifier to either lower or upper case, thus essentially making it case-insensitive. If a dialect treats all identifiers as case-insensitive, they will be normalized to lowercase regardless of being quoted or not.
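As an illustration (not from the original docs), the classmethod can be applied directly to an identifier node; with this dialect, an unqualified, non-UDF identifier is lowercased in place:

from sqlglot import exp
from sqlglot.dialects.bigquery import BigQuery

ident = exp.to_identifier("SomeColumn")   # unquoted identifier with no parent
BigQuery.normalize_identifier(ident)      # returns the same node, mutated in place
print(ident.name)                         # expected: somecolumn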
tokenizer_class =
<class 'BigQuery.Tokenizer'>
parser_class =
<class 'BigQuery.Parser'>
generator_class =
<class 'BigQuery.Generator'>
FORMAT_TRIE: Dict =
{'D': {'D': {0: True}}, 'M': {'M': {0: True}, 'O': {'N': {0: True, 'T': {'H': {0: True}}}}, 'I': {0: True}}, 'Y': {'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'H': {'H': {0: True, '1': {'2': {0: True}}, '2': {'4': {0: True}}}}, 'S': {'S': {0: True, 'S': {'S': {'S': {0: True}}}}}, 'T': {'Z': {'H': {0: True}}}}
INVERSE_ESCAPE_SEQUENCES: Dict[str, str] =
{'\x07': '\\a', '\x08': '\\b', '\x0c': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', '\x0b': '\\v'}
Inherited Members
- sqlglot.dialects.dialect.Dialect
- INDEX_OFFSET
- ALIAS_POST_TABLESAMPLE
- IDENTIFIERS_CAN_START_WITH_DIGIT
- DPIPE_IS_STRING_CONCAT
- STRICT_STRING_CONCAT
- NULL_ORDERING
- TYPED_DIVISION
- SAFE_DIVISION
- DATE_FORMAT
- DATEINT_FORMAT
- TIME_FORMAT
- get_or_raise
- format_time
- case_sensitive
- can_identify
- quote_identifier
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- parser
- generator
class BigQuery.Tokenizer(sqlglot.tokens.Tokenizer):
BYTE_STRINGS =
[("b'", "'"), ("B'", "'"), ('b"', '"'), ('B"', '"'), ('b"""', '"""'), ('B"""', '"""'), ("b'''", "'''"), ("B'''", "'''")]
RAW_STRINGS =
[("r'", "'"), ("R'", "'"), ('r"', '"'), ('R"', '"'), ('r"""', '"""'), ('R"""', '"""'), ("r'''", "'''"), ("R'''", "'''")]
KEYWORDS =
{'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, '??': <TokenType.DQMARK: 'DQMARK'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONNECT BY': <TokenType.CONNECT_BY: 'CONNECT_BY'>, 'CONSTRAINT': <TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': <TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 
'HAVING': <TokenType.HAVING: 'HAVING'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': <TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 'KEEP'>, 'KILL': <TokenType.KILL: 'KILL'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': <TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'START WITH': <TokenType.START_WITH: 'START_WITH'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNKNOWN': <TokenType.UNKNOWN: 'UNKNOWN'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VALUES': <TokenType.VALUES: 'VALUES'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': <TokenType.WHERE: 
'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'MEDIUMINT': <TokenType.MEDIUMINT: 'MEDIUMINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT128': <TokenType.INT128: 'INT128'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.BIGINT: 'BIGINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': <TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'LONGTEXT': <TokenType.LONGTEXT: 'LONGTEXT'>, 'MEDIUMTEXT': <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, 'TINYTEXT': <TokenType.TINYTEXT: 'TINYTEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'LONGBLOB': <TokenType.LONGBLOB: 'LONGBLOB'>, 'MEDIUMBLOB': <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, 'TINYBLOB': <TokenType.TINYBLOB: 'TINYBLOB'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMETZ': <TokenType.TIMETZ: 'TIMETZ'>, 'TIMESTAMP': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': <TokenType.STRUCT: 'STRUCT'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': 
<TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'TRUNCATE': <TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'FOR VERSION': <TokenType.VERSION_SNAPSHOT: 'VERSION_SNAPSHOT'>, 'FOR TIMESTAMP': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'ANY TYPE': <TokenType.VARIANT: 'VARIANT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'BYTES': <TokenType.BINARY: 'BINARY'>, 'CURRENT_DATETIME': <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'FLOAT64': <TokenType.DOUBLE: 'DOUBLE'>, 'FOR SYSTEM_TIME': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'INT64': <TokenType.BIGINT: 'BIGINT'>, 'MODEL': <TokenType.MODEL: 'MODEL'>, 'NOT DETERMINISTIC': <TokenType.VOLATILE: 'VOLATILE'>, 'RECORD': <TokenType.STRUCT: 'STRUCT'>}
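A small sketch (not from the original docs) of how these BigQuery-specific keyword mappings surface when tokenizing through the dialect instance; the tokenize method is inherited from Dialect, as listed above.

from sqlglot.dialects.bigquery import BigQuery

# BigQuery-specific keywords resolve to the generic token types shown above,
# e.g. INT64 -> BIGINT and TIMESTAMP -> TIMESTAMPTZ.
tokens = BigQuery().tokenize("SELECT CAST(col AS INT64), CAST(col AS TIMESTAMP) FROM t")
print([tok.token_type.name for tok in tokens])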
class Parser(parser.Parser):
    PREFIXED_PIVOT_COLUMNS = True

    LOG_DEFAULTS_TO_LN = True

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        "DATE": _parse_date,
        "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
        "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
        "DATE_TRUNC": lambda args: exp.DateTrunc(
            unit=exp.Literal.string(str(seq_get(args, 1))),
            this=seq_get(args, 0),
        ),
        "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
        "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
        "DIV": binary_from_function(exp.IntDiv),
        "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
        "MD5": exp.MD5Digest.from_arg_list,
        "TO_HEX": _parse_to_hex,
        "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")(
            [seq_get(args, 1), seq_get(args, 0)]
        ),
        "PARSE_TIMESTAMP": _parse_timestamp,
        "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
        "REGEXP_EXTRACT": lambda args: exp.RegexpExtract(
            this=seq_get(args, 0),
            expression=seq_get(args, 1),
            position=seq_get(args, 2),
            occurrence=seq_get(args, 3),
            group=exp.Literal.number(1) if re.compile(args[1].name).groups == 1 else None,
        ),
        "SHA256": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(256)),
        "SHA512": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(512)),
        "SPLIT": lambda args: exp.Split(
            # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
            this=seq_get(args, 0),
            expression=seq_get(args, 1) or exp.Literal.string(","),
        ),
        "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
        "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
        "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
        "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
        "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
    }

    FUNCTION_PARSERS = {
        **parser.Parser.FUNCTION_PARSERS,
        "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
    }
    FUNCTION_PARSERS.pop("TRIM")

    NO_PAREN_FUNCTIONS = {
        **parser.Parser.NO_PAREN_FUNCTIONS,
        TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
    }

    NESTED_TYPE_TOKENS = {
        *parser.Parser.NESTED_TYPE_TOKENS,
        TokenType.TABLE,
    }

    ID_VAR_TOKENS = {
        *parser.Parser.ID_VAR_TOKENS,
        TokenType.VALUES,
    }

    PROPERTY_PARSERS = {
        **parser.Parser.PROPERTY_PARSERS,
        "NOT DETERMINISTIC": lambda self: self.expression(
            exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
        ),
        "OPTIONS": lambda self: self._parse_with_property(),
    }

    CONSTRAINT_PARSERS = {
        **parser.Parser.CONSTRAINT_PARSERS,
        "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
    }

    RANGE_PARSERS = parser.Parser.RANGE_PARSERS.copy()
    RANGE_PARSERS.pop(TokenType.OVERLAPS, None)

    NULL_TOKENS = {TokenType.NULL, TokenType.UNKNOWN}

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.END: lambda self: self._parse_as_command(self._prev),
        TokenType.FOR: lambda self: self._parse_for_in(),
    }

    def _parse_for_in(self) -> exp.ForIn:
        this = self._parse_range()
        self._match_text_seq("DO")
        return self.expression(exp.ForIn, this=this, expression=self._parse_statement())

    def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
        this = super()._parse_table_part(schema=schema) or self._parse_number()

        # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
        if isinstance(this, exp.Identifier):
            table_name = this.name
            while self._match(TokenType.DASH, advance=False) and self._next:
                self._advance(2)
                table_name += f"-{self._prev.text}"

            this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))
        elif isinstance(this, exp.Literal):
            table_name = this.name

            if (
                self._curr
                and self._prev.end == self._curr.start - 1
                and self._parse_var(any_token=True)
            ):
                table_name += self._prev.text

            this = exp.Identifier(this=table_name, quoted=True)

        return this

    def _parse_table_parts(self, schema: bool = False) -> exp.Table:
        table = super()._parse_table_parts(schema=schema)
        if isinstance(table.this, exp.Identifier) and "." in table.name:
            catalog, db, this, *rest = (
                t.cast(t.Optional[exp.Expression], exp.to_identifier(x))
                for x in split_num_words(table.name, ".", 3)
            )

            if rest and this:
                this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))

            table = exp.Table(this=this, db=db, catalog=catalog)

        return table

    def _parse_json_object(self) -> exp.JSONObject:
        json_object = super()._parse_json_object()
        array_kv_pair = seq_get(json_object.expressions, 0)

        # Converts BQ's "signature 2" of JSON_OBJECT into SQLGlot's canonical representation
        # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2
        if (
            array_kv_pair
            and isinstance(array_kv_pair.this, exp.Array)
            and isinstance(array_kv_pair.expression, exp.Array)
        ):
            keys = array_kv_pair.this.expressions
            values = array_kv_pair.expression.expressions

            json_object.set(
                "expressions",
                [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)],
            )

        return json_object
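The parser overrides above are easiest to observe through sqlglot's public API. The following is a minimal sketch (the project, dataset, table, and column names are made up, and the printed SQL is illustrative rather than exact): _parse_table_part folds un-quoted, hyphenated project IDs back into a single identifier, and the SHA256 entry in FUNCTIONS is normalized to the generic exp.SHA2 node.

import sqlglot
from sqlglot import exp

# Hyphenated, un-quoted project IDs are stitched into one identifier by _parse_table_part.
tree = sqlglot.parse_one("SELECT col FROM my-project.my_dataset.my_table", read="bigquery")
print(tree.sql(dialect="bigquery"))  # e.g. SELECT col FROM my-project.my_dataset.my_table

# SHA256(x) is parsed into the dialect-agnostic SHA2 expression with length=256.
sha = sqlglot.parse_one("SELECT SHA256(x)", read="bigquery").selects[0]
assert isinstance(sha, exp.SHA2)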
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: Determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
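These options are normally passed through the top-level helpers rather than by instantiating the parser directly. A small sketch, assuming a deliberately malformed query (the exact error wording varies by sqlglot version):

import sqlglot
from sqlglot.errors import ErrorLevel, ParseError

try:
    # error_level and max_errors are forwarded to the dialect's Parser.
    sqlglot.parse_one(
        "SELECT * FROM WHERE",
        read="bigquery",
        error_level=ErrorLevel.RAISE,
        max_errors=3,
    )
except ParseError as e:
    print(e)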
FUNCTIONS =
{'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'APPROX_TOP_K': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxTopK'>>, 'ARG_MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARGMAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'MAX_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARG_MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARGMIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'MIN_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayJoin'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CastToStrType'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CHR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Chr'>>, 'CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Chr'>>, 'COALESCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'IFNULL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'NVL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'COLLATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Collate'>>, 'CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Concat'>>, 'CONCAT_WS': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.ConcatWs'>>, 'COUNT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Count'>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <function _parse_date>, 'DATE_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATEDIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 'DATE_TRUNC': <function BigQuery.Parser.<lambda>>, 'DATETIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXPLODE_OUTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ExplodeOuter'>>, 'EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Extract'>>, 'FIRST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.First'>>, 'FLATTEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Flatten'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.GenerateSeries'>>, 'GREATEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hex'>>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Initcap'>>, 'IS_NAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'ISNAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'J_S_O_N_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArray'>>, 'J_S_O_N_ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayAgg'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'JSON_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtract'>>, 'JSON_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtractScalar'>>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'J_S_O_N_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONTable'>>, 'LAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Last'>>, 'LAST_DATE_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDateOfMonth'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Left'>>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log'>>, 'LOG10': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log10'>>, 'LOG2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log2'>>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAP': 
<bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'MONTHS_BETWEEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MonthsBetween'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NULLIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nullif'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.OpenJSON'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParameterizedAgg'>>, 'PARSE_JSON': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'JSON_PARSE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Posexplode'>>, 'POSEXPLODE_OUTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PosexplodeOuter'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'PREDICT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Predict'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <function BigQuery.Parser.<lambda>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_REPLACE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpReplace'>>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Right'>>, 'ROUND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeConcat'>>, 'SAFE_DIVIDE': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.SafeDivide'>>, 'SET_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SetAgg'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SortArray'>>, 'SPLIT': <function BigQuery.Parser.<lambda>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STARTS_WITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STARTSWITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToMap'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'STUFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'INSERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'SUBSTRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeDiff'>>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': <function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Timestamp'>>, 'TIMESTAMP_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TO_DAYS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToDays'>>, 'TRANSFORM': 
<bound method Func.from_arg_list of <class 'sqlglot.expressions.Transform'>>, 'TRIM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTimeStr'>>, 'UPPER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'VAR_MAP': <function parse_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'XOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Xor'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'GLOB': <function Parser.<lambda>>, 'LIKE': <function parse_like>, 'DIV': <function binary_from_function.<locals>.<lambda>>, 'GENERATE_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'TO_HEX': <function _parse_to_hex>, 'PARSE_DATE': <function BigQuery.Parser.<lambda>>, 'PARSE_TIMESTAMP': <function _parse_timestamp>, 'REGEXP_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'SHA256': <function BigQuery.Parser.<lambda>>, 'SHA512': <function BigQuery.Parser.<lambda>>, 'TO_JSON_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>}
FUNCTION_PARSERS =
{'ANY_VALUE': <function Parser.<lambda>>, 'CAST': <function Parser.<lambda>>, 'CONCAT': <function Parser.<lambda>>, 'CONCAT_WS': <function Parser.<lambda>>, 'CONVERT': <function Parser.<lambda>>, 'DECODE': <function Parser.<lambda>>, 'EXTRACT': <function Parser.<lambda>>, 'JSON_OBJECT': <function Parser.<lambda>>, 'JSON_TABLE': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATCH': <function Parser.<lambda>>, 'OPENJSON': <function Parser.<lambda>>, 'POSITION': <function Parser.<lambda>>, 'PREDICT': <function Parser.<lambda>>, 'SAFE_CAST': <function Parser.<lambda>>, 'STRING_AGG': <function Parser.<lambda>>, 'SUBSTRING': <function Parser.<lambda>>, 'TRY_CAST': <function Parser.<lambda>>, 'TRY_CONVERT': <function Parser.<lambda>>, 'ARRAY': <function BigQuery.Parser.<lambda>>}
NO_PAREN_FUNCTIONS =
{<TokenType.CURRENT_DATE: 'CURRENT_DATE'>: <class 'sqlglot.expressions.CurrentDate'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>: <class 'sqlglot.expressions.CurrentDatetime'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>: <class 'sqlglot.expressions.CurrentTime'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>: <class 'sqlglot.expressions.CurrentTimestamp'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>: <class 'sqlglot.expressions.CurrentUser'>}
NESTED_TYPE_TOKENS =
{<TokenType.ARRAY: 'ARRAY'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.NESTED: 'NESTED'>, <TokenType.TABLE: 'TABLE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.MAP: 'MAP'>}
ID_VAR_TOKENS =
{<TokenType.BIT: 'BIT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.JSONB: 'JSONB'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.TEXT: 'TEXT'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.FALSE: 'FALSE'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.DIV: 'DIV'>, <TokenType.RECURSIVE: 'RECURSIVE'>, <TokenType.YEAR: 'YEAR'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.ANY: 'ANY'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.LOAD: 'LOAD'>, <TokenType.INDEX: 'INDEX'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.BINARY: 'BINARY'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.CACHE: 'CACHE'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.TIMESTAMP_NS: 'TIMESTAMP_NS'>, <TokenType.REFRESH: 'REFRESH'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.MERGE: 'MERGE'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.ROW: 'ROW'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.KILL: 'KILL'>, <TokenType.ALL: 'ALL'>, <TokenType.VALUES: 'VALUES'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.DESC: 'DESC'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.DELETE: 'DELETE'>, <TokenType.OVERLAPS: 'OVERLAPS'>, <TokenType.TINYTEXT: 'TINYTEXT'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.SET: 'SET'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.SEMI: 'SEMI'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.SOME: 'SOME'>, <TokenType.FILTER: 'FILTER'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.UMEDIUMINT: 'UMEDIUMINT'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.SHOW: 'SHOW'>, <TokenType.UDECIMAL: 'UDECIMAL'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.UUID: 'UUID'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.ANTI: 'ANTI'>, <TokenType.SUPER: 'SUPER'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.TABLE: 'TABLE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.LEFT: 'LEFT'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.JSON: 'JSON'>, <TokenType.INT256: 'INT256'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.ASC: 'ASC'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.TRUE: 'TRUE'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.TIME: 'TIME'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.IPPREFIX: 'IPPREFIX'>, 
<TokenType.MAP: 'MAP'>, <TokenType.VIEW: 'VIEW'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.CASE: 'CASE'>, <TokenType.RANGE: 'RANGE'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.INT: 'INT'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.UINT128: 'UINT128'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.INT128: 'INT128'>, <TokenType.END: 'END'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.XML: 'XML'>, <TokenType.TINYBLOB: 'TINYBLOB'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NULL: 'NULL'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.NEXT: 'NEXT'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.MODEL: 'MODEL'>, <TokenType.TIMESTAMP_MS: 'TIMESTAMP_MS'>, <TokenType.CHAR: 'CHAR'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.UINT: 'UINT'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.APPLY: 'APPLY'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.DATE: 'DATE'>, <TokenType.NESTED: 'NESTED'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.KEEP: 'KEEP'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.IS: 'IS'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.ROWS: 'ROWS'>, <TokenType.TIMESTAMP_S: 'TIMESTAMP_S'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.MONEY: 'MONEY'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.INET: 'INET'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.TOP: 'TOP'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.VAR: 'VAR'>, <TokenType.FULL: 'FULL'>, <TokenType.USE: 'USE'>, <TokenType.ENUM: 'ENUM'>}
PROPERTY_PARSERS =
{'ALGORITHM': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'BLOCKCOMPRESSION': <function Parser.<lambda>>, 'CHARSET': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECKSUM': <function Parser.<lambda>>, 'CLUSTER BY': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COPY': <function Parser.<lambda>>, 'DATABLOCKSIZE': <function Parser.<lambda>>, 'DEFINER': <function Parser.<lambda>>, 'DETERMINISTIC': <function Parser.<lambda>>, 'DISTKEY': <function Parser.<lambda>>, 'DISTSTYLE': <function Parser.<lambda>>, 'ENGINE': <function Parser.<lambda>>, 'EXECUTE': <function Parser.<lambda>>, 'EXTERNAL': <function Parser.<lambda>>, 'FALLBACK': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'FREESPACE': <function Parser.<lambda>>, 'HEAP': <function Parser.<lambda>>, 'IMMUTABLE': <function Parser.<lambda>>, 'INPUT': <function Parser.<lambda>>, 'JOURNAL': <function Parser.<lambda>>, 'LANGUAGE': <function Parser.<lambda>>, 'LAYOUT': <function Parser.<lambda>>, 'LIFETIME': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'LOCATION': <function Parser.<lambda>>, 'LOCK': <function Parser.<lambda>>, 'LOCKING': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATERIALIZED': <function Parser.<lambda>>, 'MERGEBLOCKRATIO': <function Parser.<lambda>>, 'MULTISET': <function Parser.<lambda>>, 'NO': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'ORDER BY': <function Parser.<lambda>>, 'OUTPUT': <function Parser.<lambda>>, 'PARTITION': <function Parser.<lambda>>, 'PARTITION BY': <function Parser.<lambda>>, 'PARTITIONED BY': <function Parser.<lambda>>, 'PARTITIONED_BY': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'RANGE': <function Parser.<lambda>>, 'REMOTE': <function Parser.<lambda>>, 'RETURNS': <function Parser.<lambda>>, 'ROW': <function Parser.<lambda>>, 'ROW_FORMAT': <function Parser.<lambda>>, 'SAMPLE': <function Parser.<lambda>>, 'SET': <function Parser.<lambda>>, 'SETTINGS': <function Parser.<lambda>>, 'SORTKEY': <function Parser.<lambda>>, 'SOURCE': <function Parser.<lambda>>, 'STABLE': <function Parser.<lambda>>, 'STORED': <function Parser.<lambda>>, 'SYSTEM_VERSIONING': <function Parser.<lambda>>, 'TBLPROPERTIES': <function Parser.<lambda>>, 'TEMP': <function Parser.<lambda>>, 'TEMPORARY': <function Parser.<lambda>>, 'TO': <function Parser.<lambda>>, 'TRANSIENT': <function Parser.<lambda>>, 'TRANSFORM': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'USING': <function Parser.<lambda>>, 'VOLATILE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'NOT DETERMINISTIC': <function BigQuery.Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
CONSTRAINT_PARSERS =
{'AUTOINCREMENT': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'CASESPECIFIC': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECK': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COMPRESS': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'NONCLUSTERED': <function Parser.<lambda>>, 'DEFAULT': <function Parser.<lambda>>, 'ENCODE': <function Parser.<lambda>>, 'FOREIGN KEY': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'GENERATED': <function Parser.<lambda>>, 'IDENTITY': <function Parser.<lambda>>, 'INLINE': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'NOT': <function Parser.<lambda>>, 'NULL': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'PATH': <function Parser.<lambda>>, 'PERIOD': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'REFERENCES': <function Parser.<lambda>>, 'TITLE': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'UNIQUE': <function Parser.<lambda>>, 'UPPERCASE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
RANGE_PARSERS =
{<TokenType.BETWEEN: 'BETWEEN'>: <function Parser.<lambda>>, <TokenType.GLOB: 'GLOB'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.ILIKE: 'ILIKE'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.IN: 'IN'>: <function Parser.<lambda>>, <TokenType.IRLIKE: 'IRLIKE'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.IS: 'IS'>: <function Parser.<lambda>>, <TokenType.LIKE: 'LIKE'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.RLIKE: 'RLIKE'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.SIMILAR_TO: 'SIMILAR_TO'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.FOR: 'FOR'>: <function Parser.<lambda>>}
STATEMENT_PARSERS =
{<TokenType.ALTER: 'ALTER'>: <function Parser.<lambda>>, <TokenType.BEGIN: 'BEGIN'>: <function Parser.<lambda>>, <TokenType.CACHE: 'CACHE'>: <function Parser.<lambda>>, <TokenType.COMMIT: 'COMMIT'>: <function Parser.<lambda>>, <TokenType.COMMENT: 'COMMENT'>: <function Parser.<lambda>>, <TokenType.CREATE: 'CREATE'>: <function Parser.<lambda>>, <TokenType.DELETE: 'DELETE'>: <function Parser.<lambda>>, <TokenType.DESC: 'DESC'>: <function Parser.<lambda>>, <TokenType.DESCRIBE: 'DESCRIBE'>: <function Parser.<lambda>>, <TokenType.DROP: 'DROP'>: <function Parser.<lambda>>, <TokenType.INSERT: 'INSERT'>: <function Parser.<lambda>>, <TokenType.KILL: 'KILL'>: <function Parser.<lambda>>, <TokenType.LOAD: 'LOAD'>: <function Parser.<lambda>>, <TokenType.MERGE: 'MERGE'>: <function Parser.<lambda>>, <TokenType.PIVOT: 'PIVOT'>: <function Parser.<lambda>>, <TokenType.PRAGMA: 'PRAGMA'>: <function Parser.<lambda>>, <TokenType.REFRESH: 'REFRESH'>: <function Parser.<lambda>>, <TokenType.ROLLBACK: 'ROLLBACK'>: <function Parser.<lambda>>, <TokenType.SET: 'SET'>: <function Parser.<lambda>>, <TokenType.UNCACHE: 'UNCACHE'>: <function Parser.<lambda>>, <TokenType.UPDATE: 'UPDATE'>: <function Parser.<lambda>>, <TokenType.USE: 'USE'>: <function Parser.<lambda>>, <TokenType.END: 'END'>: <function BigQuery.Parser.<lambda>>, <TokenType.FOR: 'FOR'>: <function BigQuery.Parser.<lambda>>}
TABLE_ALIAS_TOKENS =
{<TokenType.BIT: 'BIT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.JSONB: 'JSONB'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.TEXT: 'TEXT'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.FALSE: 'FALSE'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.DIV: 'DIV'>, <TokenType.RECURSIVE: 'RECURSIVE'>, <TokenType.YEAR: 'YEAR'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.ANY: 'ANY'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.LOAD: 'LOAD'>, <TokenType.INDEX: 'INDEX'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.BINARY: 'BINARY'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.CACHE: 'CACHE'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.TIMESTAMP_NS: 'TIMESTAMP_NS'>, <TokenType.REFRESH: 'REFRESH'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.MERGE: 'MERGE'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.ROW: 'ROW'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.KILL: 'KILL'>, <TokenType.ALL: 'ALL'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.DESC: 'DESC'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.DELETE: 'DELETE'>, <TokenType.ENUM: 'ENUM'>, <TokenType.OVERLAPS: 'OVERLAPS'>, <TokenType.TINYTEXT: 'TINYTEXT'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.SET: 'SET'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.UINT256: 'UINT256'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.SEMI: 'SEMI'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.SOME: 'SOME'>, <TokenType.FILTER: 'FILTER'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.UMEDIUMINT: 'UMEDIUMINT'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.SHOW: 'SHOW'>, <TokenType.UDECIMAL: 'UDECIMAL'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.UUID: 'UUID'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.SUPER: 'SUPER'>, <TokenType.ANTI: 'ANTI'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.TABLE: 'TABLE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.JSON: 'JSON'>, <TokenType.INT256: 'INT256'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.ASC: 'ASC'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.TRUE: 'TRUE'>, <TokenType.COMMENT: 'COMMENT'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.TIME: 'TIME'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.MAP: 'MAP'>, <TokenType.VIEW: 'VIEW'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 
<TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.CASE: 'CASE'>, <TokenType.RANGE: 'RANGE'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.INT: 'INT'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.UINT128: 'UINT128'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.INT128: 'INT128'>, <TokenType.END: 'END'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.XML: 'XML'>, <TokenType.TINYBLOB: 'TINYBLOB'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NULL: 'NULL'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.NEXT: 'NEXT'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.MODEL: 'MODEL'>, <TokenType.TIMESTAMP_MS: 'TIMESTAMP_MS'>, <TokenType.CHAR: 'CHAR'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.UINT: 'UINT'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.DATE: 'DATE'>, <TokenType.NESTED: 'NESTED'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.KEEP: 'KEEP'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.IS: 'IS'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.ROWS: 'ROWS'>, <TokenType.TIMESTAMP_S: 'TIMESTAMP_S'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.MONEY: 'MONEY'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.INET: 'INET'>, <TokenType.TOP: 'TOP'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.VAR: 'VAR'>, <TokenType.USE: 'USE'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>}
SET_TRIE: Dict =
{'GLOBAL': {0: True}, 'LOCAL': {0: True}, 'SESSION': {0: True}, 'TRANSACTION': {0: True}}
FORMAT_MAPPING: Dict[str, str] =
{'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
FORMAT_TRIE: Dict =
{'D': {'D': {0: True}}, 'M': {'M': {0: True}, 'O': {'N': {0: True, 'T': {'H': {0: True}}}}, 'I': {0: True}}, 'Y': {'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'H': {'H': {0: True, '1': {'2': {0: True}}, '2': {'4': {0: True}}}}, 'S': {'S': {0: True, 'S': {'S': {'S': {0: True}}}}}, 'T': {'Z': {'H': {0: True}}}}
Inherited Members
- sqlglot.parser.Parser
- Parser
- STRUCT_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_KEYWORDS
- DB_CREATABLES
- CREATABLES
- INTERVAL_VARS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- JOIN_HINTS
- LAMBDAS
- COLUMN_OPERATORS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- NO_PAREN_FUNCTION_PARSERS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- QUERY_MODIFIER_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- MODIFIABLES
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- CLONE_KINDS
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- STRICT_CAST
- CONCAT_NULL_OUTPUTS_STRING
- IDENTIFY_PIVOT_STRINGS
- ALTER_TABLE_ADD_COLUMN_KEYWORD
- TABLESAMPLE_CSV
- SET_REQUIRES_ASSIGNMENT_DELIMITER
- TRIM_PATTERN_FIRST
- TYPED_DIVISION
- SAFE_DIVISION
- INDEX_OFFSET
- ALIAS_POST_TABLESAMPLE
- STRICT_STRING_CONCAT
- NULL_ORDERING
- error_level
- error_message_context
- max_errors
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
class Generator(generator.Generator):
    EXPLICIT_UNION = True
    INTERVAL_ALLOWS_PLURAL_FORM = False
    JOIN_HINTS = False
    QUERY_HINTS = False
    TABLE_HINTS = False
    LIMIT_FETCH = "LIMIT"
    RENAME_TABLE_WITH_DB = False
    NVL2_SUPPORTED = False
    UNNEST_WITH_ORDINALITY = False
    COLLATE_IS_FUNC = True

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
        exp.ArgMax: arg_max_or_min_no_count("MAX_BY"),
        exp.ArgMin: arg_max_or_min_no_count("MIN_BY"),
        exp.ArraySize: rename_func("ARRAY_LENGTH"),
        exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
        exp.CollateProperty: lambda self, e: f"DEFAULT COLLATE {self.sql(e, 'this')}"
        if e.args.get("default")
        else f"COLLATE {self.sql(e, 'this')}",
        exp.Create: _create_sql,
        exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
        exp.DateAdd: date_add_interval_sql("DATE", "ADD"),
        exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
        exp.DateFromParts: rename_func("DATE"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.DateSub: date_add_interval_sql("DATE", "SUB"),
        exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
        exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
        exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
        exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
        exp.GroupConcat: rename_func("STRING_AGG"),
        exp.Hex: rename_func("TO_HEX"),
        exp.If: if_sql(false_value="NULL"),
        exp.ILike: no_ilike_sql,
        exp.IntDiv: rename_func("DIV"),
        exp.JSONFormat: rename_func("TO_JSON_STRING"),
        exp.JSONKeyValue: json_keyvalue_comma_sql,
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
        exp.MD5Digest: rename_func("MD5"),
        exp.Min: min_or_least,
        exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
        exp.RegexpExtract: lambda self, e: self.func(
            "REGEXP_EXTRACT",
            e.this,
            e.expression,
            e.args.get("position"),
            e.args.get("occurrence"),
        ),
        exp.RegexpReplace: regexp_replace_sql,
        exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
        exp.ReturnsProperty: _returnsproperty_sql,
        exp.Select: transforms.preprocess(
            [
                transforms.explode_to_unnest(),
                _unqualify_unnest,
                transforms.eliminate_distinct_on,
                _alias_ordered_group,
                transforms.eliminate_semi_and_anti_joins,
            ]
        ),
        exp.SHA2: lambda self, e: self.func(
            f"SHA256" if e.text("length") == "256" else "SHA512", e.this
        ),
        exp.StabilityProperty: lambda self, e: f"DETERMINISTIC"
        if e.name == "IMMUTABLE"
        else "NOT DETERMINISTIC",
        exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
        exp.StrToTime: lambda self, e: self.func(
            "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
        ),
        exp.TimeAdd: date_add_interval_sql("TIME", "ADD"),
        exp.TimeSub: date_add_interval_sql("TIME", "SUB"),
        exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"),
        exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"),
        exp.TimeStrToTime: timestrtotime_sql,
        exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
        exp.TsOrDsAdd: date_add_interval_sql("DATE", "ADD"),
        exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
        exp.Unhex: rename_func("FROM_HEX"),
        exp.Values: _derived_table_values_to_unnest,
        exp.VariancePop: rename_func("VAR_POP"),
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
        exp.DataType.Type.BIGINT: "INT64",
        exp.DataType.Type.BINARY: "BYTES",
        exp.DataType.Type.BOOLEAN: "BOOL",
        exp.DataType.Type.CHAR: "STRING",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DOUBLE: "FLOAT64",
        exp.DataType.Type.FLOAT: "FLOAT64",
        exp.DataType.Type.INT: "INT64",
        exp.DataType.Type.NCHAR: "STRING",
        exp.DataType.Type.NVARCHAR: "STRING",
        exp.DataType.Type.SMALLINT: "INT64",
        exp.DataType.Type.TEXT: "STRING",
        exp.DataType.Type.TIMESTAMP: "DATETIME",
        exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
        exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
        exp.DataType.Type.TINYINT: "INT64",
        exp.DataType.Type.VARBINARY: "BYTES",
        exp.DataType.Type.VARCHAR: "STRING",
        exp.DataType.Type.VARIANT: "ANY TYPE",
    }

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
    RESERVED_KEYWORDS = {
        *generator.Generator.RESERVED_KEYWORDS,
        "all", "and", "any", "array", "as", "asc", "assert_rows_modified", "at",
        "between", "by", "case", "cast", "collate", "contains", "create", "cross",
        "cube", "current", "default", "define", "desc", "distinct", "else", "end",
        "enum", "escape", "except", "exclude", "exists", "extract", "false", "fetch",
        "following", "for", "from", "full", "group", "grouping", "groups", "hash",
        "having", "if", "ignore", "in", "inner", "intersect", "interval", "into",
        "is", "join", "lateral", "left", "like", "limit", "lookup", "merge",
        "natural", "new", "no", "not", "null", "nulls", "of", "on",
        "or", "order", "outer", "over", "partition", "preceding", "proto", "qualify",
        "range", "recursive", "respect", "right", "rollup", "rows", "select", "set",
        "some", "struct", "tablesample", "then", "to", "treat", "true", "unbounded",
        "union", "unnest", "using", "when", "where", "window", "with", "within",
    }

    def eq_sql(self, expression: exp.EQ) -> str:
        # Operands of = cannot be NULL in BigQuery
        if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
            return "NULL"

        return self.binary(expression, "=")

    def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
        parent = expression.parent

        # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
        # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
        if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
            return self.func(
                "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
            )

        return super().attimezone_sql(expression)

    def trycast_sql(self, expression: exp.TryCast) -> str:
        return self.cast_sql(expression, safe_prefix="SAFE_")

    def cte_sql(self, expression: exp.CTE) -> str:
        if expression.alias_column_names:
            self.unsupported("Column names in CTE definition are not supported.")
        return super().cte_sql(expression)

    def array_sql(self, expression: exp.Array) -> str:
        first_arg = seq_get(expression.expressions, 0)
        if isinstance(first_arg, exp.Subqueryable):
            return f"ARRAY{self.wrap(self.sql(first_arg))}"

        return inline_array_sql(self, expression)

    def transaction_sql(self, *_) -> str:
        return "BEGIN TRANSACTION"

    def commit_sql(self, *_) -> str:
        return "COMMIT TRANSACTION"

    def rollback_sql(self, *_) -> str:
        return "ROLLBACK TRANSACTION"

    def in_unnest_op(self, expression: exp.Unnest) -> str:
        return self.sql(expression)

    def except_op(self, expression: exp.Except) -> str:
        if not expression.args.get("distinct", False):
            self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
        return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"

    def intersect_op(self, expression: exp.Intersect) -> str:
        if not expression.args.get("distinct", False):
            self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
        return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"

    def with_properties(self, properties: exp.Properties) -> str:
        return self.properties(properties, prefix=self.seg("OPTIONS"))

    def version_sql(self, expression: exp.Version) -> str:
        if expression.name == "TIMESTAMP":
            expression.set("this", "SYSTEM_TIME")
        return super().version_sql(expression)
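A rough end-to-end view of a few of the rules above, via transpilation into BigQuery (the commented output is illustrative and may differ slightly across sqlglot versions):

import sqlglot

# trycast_sql emits SAFE_CAST, and TYPE_MAPPING rewrites TEXT to STRING.
print(sqlglot.transpile("SELECT TRY_CAST(x AS TEXT) FROM t", write="bigquery")[0])
# e.g. SELECT SAFE_CAST(x AS STRING) FROM t

# BigQuery has no ILIKE, so exp.ILike is rewritten through no_ilike_sql.
print(sqlglot.transpile("SELECT * FROM t WHERE name ILIKE '%foo%'", write="bigquery")[0])
# e.g. SELECT * FROM t WHERE LOWER(name) LIKE '%foo%'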
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether or not to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether or not to normalize identifiers to lowercase. Default: False.
- pad: Determines the pad size in a formatted string. Default: 2.
- indent: Determines the indentation size in a formatted string. Default: 2.
- normalize_functions: Whether or not to normalize all function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Determines whether or not the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
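In practice these options are forwarded through Expression.sql() or sqlglot.transpile() rather than by constructing the generator directly. A small sketch with made-up table and column names (the pretty-printed layout is approximate):

import sqlglot

tree = sqlglot.parse_one("select col_a, col_b from dataset.tbl where col_a > 1", read="bigquery")

# pretty and identify are generator options; dialect="bigquery" selects this Generator.
print(tree.sql(dialect="bigquery", pretty=True, identify=True))
# e.g.
# SELECT
#   `col_a`,
#   `col_b`
# FROM `dataset`.`tbl`
# WHERE
#   `col_a` > 1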
TRANSFORMS =
{<class 'sqlglot.expressions.DateAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalSpan'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OutputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.RemoteWithConnectionModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function _returnsproperty_sql>, <class 'sqlglot.expressions.SampleProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransformModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: 
<function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ApproxDistinct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ArgMax'>: <function arg_max_or_min_no_count.<locals>._arg_max_or_min_sql>, <class 'sqlglot.expressions.ArgMin'>: <function arg_max_or_min_no_count.<locals>._arg_max_or_min_sql>, <class 'sqlglot.expressions.ArraySize'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Cast'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.CollateProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.Create'>: <function _create_sql>, <class 'sqlglot.expressions.CTE'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.DateDiff'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.DateFromParts'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.DateSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DateTrunc'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.GenerateSeries'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.GroupConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Hex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.If'>: <function if_sql.<locals>._if_sql>, <class 'sqlglot.expressions.ILike'>: <function no_ilike_sql>, <class 'sqlglot.expressions.IntDiv'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONFormat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONKeyValue'>: <function json_keyvalue_comma_sql>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.MD5'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.MD5Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.PartitionedByProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpExtract'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpReplace'>: <function regexp_replace_sql>, <class 'sqlglot.expressions.RegexpLike'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.SHA2'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToDate'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToTime'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TimeAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimeSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampSub'>: <function date_add_interval_sql.<locals>.func>, <class 
'sqlglot.expressions.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.Trim'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsToDate'>: <function ts_or_ds_to_date_sql.<locals>._ts_or_ds_to_date_sql>, <class 'sqlglot.expressions.Unhex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Values'>: <function _derived_table_values_to_unnest>, <class 'sqlglot.expressions.VariancePop'>: <function rename_func.<locals>.<lambda>>}
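The TRANSFORMS table above maps sqlglot expression classes to the callbacks that render them as BigQuery SQL. A minimal hedged sketch of the effect, using the top-level transpile API; the exact output string is an expectation for this version, not a guarantee:

import sqlglot

# The exp.ILike entry (no_ilike_sql) means ILIKE has to be rewritten, since
# BigQuery has no ILIKE operator; the rewrite is a LOWER()-based LIKE.
sql = "SELECT * FROM t WHERE name ILIKE '%a%'"
print(sqlglot.transpile(sql, read="postgres", write="bigquery")[0])
# Expected (assumption): the predicate becomes a LIKE over LOWER(name).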
TYPE_MAPPING =
{<Type.NCHAR: 'NCHAR'>: 'STRING', <Type.NVARCHAR: 'NVARCHAR'>: 'STRING', <Type.MEDIUMTEXT: 'MEDIUMTEXT'>: 'TEXT', <Type.LONGTEXT: 'LONGTEXT'>: 'TEXT', <Type.TINYTEXT: 'TINYTEXT'>: 'TEXT', <Type.MEDIUMBLOB: 'MEDIUMBLOB'>: 'BLOB', <Type.LONGBLOB: 'LONGBLOB'>: 'BLOB', <Type.TINYBLOB: 'TINYBLOB'>: 'BLOB', <Type.INET: 'INET'>: 'INET', <Type.BIGDECIMAL: 'BIGDECIMAL'>: 'BIGNUMERIC', <Type.BIGINT: 'BIGINT'>: 'INT64', <Type.BINARY: 'BINARY'>: 'BYTES', <Type.BOOLEAN: 'BOOLEAN'>: 'BOOL', <Type.CHAR: 'CHAR'>: 'STRING', <Type.DECIMAL: 'DECIMAL'>: 'NUMERIC', <Type.DOUBLE: 'DOUBLE'>: 'FLOAT64', <Type.FLOAT: 'FLOAT'>: 'FLOAT64', <Type.INT: 'INT'>: 'INT64', <Type.SMALLINT: 'SMALLINT'>: 'INT64', <Type.TEXT: 'TEXT'>: 'STRING', <Type.TIMESTAMP: 'TIMESTAMP'>: 'DATETIME', <Type.TIMESTAMPTZ: 'TIMESTAMPTZ'>: 'TIMESTAMP', <Type.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>: 'TIMESTAMP', <Type.TINYINT: 'TINYINT'>: 'INT64', <Type.VARBINARY: 'VARBINARY'>: 'BYTES', <Type.VARCHAR: 'VARCHAR'>: 'STRING', <Type.VARIANT: 'VARIANT'>: 'ANY TYPE'}
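TYPE_MAPPING controls how data types are spelled when generating BigQuery SQL. A small hedged example; the exact rendering may vary across sqlglot versions:

import sqlglot

# VARCHAR and DECIMAL are mapped to BigQuery's STRING and NUMERIC per TYPE_MAPPING.
print(sqlglot.transpile("SELECT CAST(x AS VARCHAR), CAST(y AS DECIMAL)", write="bigquery")[0])
# Expected (assumption): SELECT CAST(x AS STRING), CAST(y AS NUMERIC)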
PROPERTIES_LOCATION =
{<class 'sqlglot.expressions.AlgorithmProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.AutoIncrementProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.BlockCompressionProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CharacterSetProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ChecksumProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CollateProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Cluster'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ClusteredByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DataBlocksizeProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.DefinerProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.DictRange'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DictProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistStyleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.EngineProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExternalProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.FallbackProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.FileFormatProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.FreespaceProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.HeapProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.InputModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.IsolatedLoadingProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.JournalProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.LanguageProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LikeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LocationProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LockingProperty'>: <Location.POST_ALIAS: 'POST_ALIAS'>, <class 'sqlglot.expressions.LogProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.MaterializedProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeBlockRatioProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.OnProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.OnCommitProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.Order'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.OutputModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedOfProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PrimaryKey'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Property'>: <Location.POST_WITH: 'POST_WITH'>, <class 
'sqlglot.expressions.RemoteWithConnectionModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ReturnsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatDelimitedProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatSerdeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SampleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SchemaCommentProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SerdeProperties'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Set'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SettingsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SetProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.SortKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.StabilityProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TemporaryProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.ToTableProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TransientProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.TransformModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.MergeTreeTTL'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.VolatileProperty'>: <Location.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.WithDataProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.WithSystemVersioningProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>}
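PROPERTIES_LOCATION records where each DDL property is emitted relative to the CREATE statement (POST_CREATE, POST_SCHEMA, and so on). A quick lookup sketch:

from sqlglot import exp
from sqlglot.dialects.bigquery import BigQuery

# PartitionedByProperty is placed after the column schema in generated DDL.
print(BigQuery.Generator.PROPERTIES_LOCATION[exp.PartitionedByProperty])
# Location.POST_SCHEMA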
RESERVED_KEYWORDS =
{'struct', 'into', 'if', 'cross', 'range', 'lookup', 'unnest', 'to', 'tablesample', 'select', 'ignore', 'collate', 'merge', 'union', 'preceding', 'assert_rows_modified', 'left', 'proto', 'null', 'treat', 'following', 'of', 'for', 'from', 'except', 'intersect', 'no', 'like', 'where', 'having', 'current', 'true', 'group', 'cube', 'not', 'false', 'using', 'is', 'respect', 'cast', 'by', 'hash', 'new', 'limit', 'over', 'then', 'enum', 'exists', 'asc', 'desc', 'rollup', 'else', 'right', 'grouping', 'unbounded', 'extract', 'contains', 'array', 'between', 'in', 'as', 'lateral', 'exclude', 'partition', 'end', 'full', 'rows', 'on', 'some', 'any', 'and', 'with', 'or', 'fetch', 'create', 'define', 'when', 'at', 'distinct', 'set', 'qualify', 'window', 'natural', 'nulls', 'interval', 'all', 'within', 'case', 'inner', 'default', 'recursive', 'escape', 'join', 'order', 'outer', 'groups'}
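Identifiers whose lowercased text appears in RESERVED_KEYWORDS are quoted by the generator even when the expression tree leaves them unquoted. A hedged sketch; the backtick-quoted output is the expected behavior, not a guaranteed string:

from sqlglot import exp

# "order" is in RESERVED_KEYWORDS, so the BigQuery generator should backtick-quote it.
node = exp.select(exp.column("order")).from_("t")
print(node.sql(dialect="bigquery"))
# Expected (assumption): SELECT `order` FROM t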
def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
    parent = expression.parent

    # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
    # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
    if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
        return self.func(
            "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
        )

    return super().attimezone_sql(expression)
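A short usage sketch of the rule above: outside of a CAST(... AS STRING FORMAT ...) context, AT TIME ZONE is rewritten into nested DATETIME/TIMESTAMP calls. The expected output is an assumption about this version's behavior:

import sqlglot

sql = "SELECT ts AT TIME ZONE 'America/New_York' FROM t"
print(sqlglot.transpile(sql, read="postgres", write="bigquery")[0])
# Expected (assumption): SELECT TIMESTAMP(DATETIME(ts, 'America/New_York')) FROM t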
INVERSE_ESCAPE_SEQUENCES: Dict[str, str] =
{'\x07': '\\a', '\x08': '\\b', '\x0c': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', '\x0b': '\\v'}
@classmethod
def can_identify(text: str, identify: str | bool = 'safe') -> bool:
@classmethod
def can_identify(cls, text: str, identify: str | bool = "safe") -> bool:
    """Checks if text can be identified given an identify option.

    Args:
        text: The text to check.
        identify:
            "always" or `True`: Always returns true.
            "safe": True if the identifier is case-insensitive.

    Returns:
        Whether or not the given text can be identified.
    """
    if identify is True or identify == "always":
        return True

    if identify == "safe":
        return not cls.case_sensitive(text)

    return False
Checks if text can be identified given an identify option.
Arguments:
- text: The text to check.
- identify: "always" or
True
: Always returns true. "safe": True if the identifier is case-insensitive.
Returns:
Whether or not the given text can be identified.
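A hedged usage sketch: with identify="always" everything is identified; with "safe" the result depends on the dialect's case_sensitive() check, and since BigQuery leaves identifier case alone (RESOLVES_IDENTIFIERS_AS_UPPERCASE is None) mixed-case text is expected to pass as well:

from sqlglot.dialects.bigquery import BigQuery

print(BigQuery.can_identify("Foo", "always"))  # True: "always" identifies everything
print(BigQuery.can_identify("Foo", "safe"))    # expected True for BigQuery (assumption)
print(BigQuery.can_identify("Foo", False))     # False: identification disabled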
TOKENIZER_CLASS =
<class 'BigQuery.Tokenizer'>
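The tokenizer class handles BigQuery-specific lexing, such as backtick-quoted identifiers. A minimal sketch:

from sqlglot.dialects.bigquery import BigQuery

tokens = BigQuery.Tokenizer().tokenize("SELECT `col` FROM `project.dataset.tbl`")
print([token.token_type for token in tokens])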
Inherited Members
- sqlglot.generator.Generator
- Generator
- NULL_ORDERING_SUPPORTED
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- TABLESAMPLE_WITH_METHOD
- TABLESAMPLE_SIZE_IS_PERCENT
- LIMIT_ONLY_LITERALS
- GROUPINGS_SEP
- INDEX_ON
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- LIMIT_IS_TOP
- RETURNING_END
- COLUMN_JOIN_MARKS_SUPPORTED
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- SELECT_KINDS
- VALUES_AS_TABLE
- ALTER_TABLE_ADD_COLUMN_KEYWORD
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_PARAMETERS
- COMPUTED_COLUMN_WITH_TYPE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- DATA_TYPE_SPECIFIERS_ALLOWED
- SUPPORTS_NESTED_CTES
- ENSURE_BOOLS
- CTE_RECURSIVE_KEYWORD_REQUIRED
- TYPED_DIVISION
- SAFE_DIVISION
- STAR_MAPPING
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- KEY_VALUE_DEFINITONS
- SENTINEL_LINE_BREAK
- INDEX_OFFSET
- ALIAS_POST_TABLESAMPLE
- IDENTIFIERS_CAN_START_WITH_DIGIT
- STRICT_STRING_CONCAT
- NULL_ORDERING
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- normalize_functions
- unsupported_messages
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- createable_sql
- create_sql
- clone_sql
- describe_sql
- prepend_ctes
- with_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- inputoutputformat_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- intersect_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- returning_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- setitem_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_having_modifiers
- after_limit_modifiers
- select_sql
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_sql
- safebracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- safeconcat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- formatjson_sql
- jsonobject_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- add_sql
- and_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- safedpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- is_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- log_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql