sqlglot.dialects.bigquery
  1from __future__ import annotations
  2
  3import logging
  4import re
  5import typing as t
  6
  7from sqlglot import exp, generator, parser, tokens, transforms
  8from sqlglot._typing import E
  9from sqlglot.dialects.dialect import (
 10    Dialect,
 11    arg_max_or_min_no_count,
 12    binary_from_function,
 13    date_add_interval_sql,
 14    datestrtodate_sql,
 15    format_time_lambda,
 16    if_sql,
 17    inline_array_sql,
 18    json_keyvalue_comma_sql,
 19    max_or_greatest,
 20    min_or_least,
 21    no_ilike_sql,
 22    parse_date_delta_with_interval,
 23    regexp_replace_sql,
 24    rename_func,
 25    timestrtotime_sql,
 26    ts_or_ds_add_cast,
 27    ts_or_ds_to_date_sql,
 28)
 29from sqlglot.helper import seq_get, split_num_words
 30from sqlglot.tokens import TokenType
 31
 32logger = logging.getLogger("sqlglot")
 33
 34
 35def _derived_table_values_to_unnest(self: BigQuery.Generator, expression: exp.Values) -> str:
 36    if not expression.find_ancestor(exp.From, exp.Join):
 37        return self.values_sql(expression)
 38
 39    alias = expression.args.get("alias")
 40
 41    structs = [
 42        exp.Struct(
 43            expressions=[
 44                exp.alias_(value, column_name)
 45                for value, column_name in zip(
 46                    t.expressions,
 47                    alias.columns
 48                    if alias and alias.columns
 49                    else (f"_c{i}" for i in range(len(t.expressions))),
 50                )
 51            ]
 52        )
 53        for t in expression.find_all(exp.Tuple)
 54    ]
 55
 56    return self.unnest_sql(exp.Unnest(expressions=[exp.Array(expressions=structs)]))
 57
 58
 59def _returnsproperty_sql(self: BigQuery.Generator, expression: exp.ReturnsProperty) -> str:
 60    this = expression.this
 61    if isinstance(this, exp.Schema):
 62        this = f"{this.this} <{self.expressions(this)}>"
 63    else:
 64        this = self.sql(this)
 65    return f"RETURNS {this}"
 66
 67
 68def _create_sql(self: BigQuery.Generator, expression: exp.Create) -> str:
 69    kind = expression.args["kind"]
 70    returns = expression.find(exp.ReturnsProperty)
 71
 72    if kind.upper() == "FUNCTION" and returns and returns.args.get("is_table"):
 73        expression.set("kind", "TABLE FUNCTION")
 74
 75        if isinstance(expression.expression, (exp.Subquery, exp.Literal)):
 76            expression.set("expression", expression.expression.this)
 77
 78        return self.create_sql(expression)
 79
 80    return self.create_sql(expression)
 81
 82
 83def _unqualify_unnest(expression: exp.Expression) -> exp.Expression:
 84    """Remove references to unnest table aliases since bigquery doesn't allow them.
 85
 86    These are added by the optimizer's qualify_column step.
 87    """
 88    from sqlglot.optimizer.scope import find_all_in_scope
 89
 90    if isinstance(expression, exp.Select):
 91        unnest_aliases = {
 92            unnest.alias
 93            for unnest in find_all_in_scope(expression, exp.Unnest)
 94            if isinstance(unnest.parent, (exp.From, exp.Join))
 95        }
 96        if unnest_aliases:
 97            for column in expression.find_all(exp.Column):
 98                if column.table in unnest_aliases:
 99                    column.set("table", None)
100                elif column.db in unnest_aliases:
101                    column.set("db", None)
102
103    return expression
104
105
106# https://issuetracker.google.com/issues/162294746
107# workaround for bigquery bug when grouping by an expression and then ordering
108# WITH x AS (SELECT 1 y)
109# SELECT y + 1 z
110# FROM x
111# GROUP BY x + 1
# ORDER BY z
def _alias_ordered_group(expression: exp.Expression) -> exp.Expression:
    """Replace grouped-by expressions with their select aliases.

    Works around the BigQuery bug linked above, which affects queries that both
    GROUP BY an expression and ORDER BY its alias.
    """
    if not isinstance(expression, exp.Select):
        return expression

    group = expression.args.get("group")
    order = expression.args.get("order")
    if not (group and order):
        return expression

    # Map each aliased projection expression to its alias identifier.
    aliased_selects = {
        select.this: select.args["alias"]
        for select in expression.selects
        if isinstance(select, exp.Alias)
    }

    for grouped in group.expressions:
        alias = aliased_selects.get(grouped)
        if alias:
            grouped.replace(exp.column(alias))

    return expression
132
133
def _pushdown_cte_column_names(expression: exp.Expression) -> exp.Expression:
    """BigQuery doesn't allow column names when defining a CTE, so we try to push them down."""
    if not (isinstance(expression, exp.CTE) and expression.alias_column_names):
        return expression

    cte_query = expression.this

    if cte_query.is_star:
        logger.warning(
            "Can't push down CTE column names for star queries. Run the query through"
            " the optimizer or use 'qualify' to expand the star projections first."
        )
        return expression

    column_names = expression.alias_column_names
    expression.args["alias"].set("columns", None)

    for name, select in zip(column_names, cte_query.selects):
        to_replace = select

        # Inner aliases are shadowed by the CTE column names
        if isinstance(select, exp.Alias):
            select = select.this

        to_replace.replace(exp.alias_(select, name))

    return expression
159
160
def _parse_timestamp(args: t.List) -> exp.StrToTime:
    """Build a StrToTime from ``PARSE_TIMESTAMP(fmt, value[, zone])`` arguments."""
    build_str_to_time = format_time_lambda(exp.StrToTime, "bigquery")
    # BigQuery passes the format first and the value second, so swap them.
    node = build_str_to_time([seq_get(args, 1), seq_get(args, 0)])
    node.set("zone", seq_get(args, 2))
    return node
165
166
def _parse_date(args: t.List) -> exp.Date | exp.DateFromParts:
    """``DATE(y, m, d)`` maps to DateFromParts; any other arity stays a plain Date."""
    if len(args) == 3:
        return exp.DateFromParts.from_arg_list(args)
    return exp.Date.from_arg_list(args)
170
171
def _parse_to_hex(args: t.List) -> exp.Hex | exp.MD5:
    # TO_HEX(MD5(..)) is common in BigQuery, so it's parsed into MD5 to simplify its transpilation
    arg = seq_get(args, 0)
    if isinstance(arg, exp.MD5Digest):
        return exp.MD5(this=arg.this)
    return exp.Hex(this=arg)
176
177
def _array_contains_sql(self: BigQuery.Generator, expression: exp.ArrayContains) -> str:
    """Rewrite ARRAY_CONTAINS as ``EXISTS(SELECT 1 FROM UNNEST(arr) _unnest(_col) WHERE _col = x)``."""
    unnested = exp.Unnest(expressions=[expression.left]).as_("_unnest", table=["_col"])
    probe = exp.select("1").from_(unnested).where(exp.column("_col").eq(expression.right))
    return self.sql(exp.Exists(this=probe))
186
187
def _ts_or_ds_add_sql(self: BigQuery.Generator, expression: exp.TsOrDsAdd) -> str:
    """Cast the TS_OR_DS operand first, then render it as a DATE_ADD."""
    casted = ts_or_ds_add_cast(expression)
    return date_add_interval_sql("DATE", "ADD")(self, casted)
190
191
def _ts_or_ds_diff_sql(self: BigQuery.Generator, expression: exp.TsOrDsDiff) -> str:
    """Emit DATE_DIFF over TIMESTAMP-cast operands; the unit defaults to DAY."""
    for operand in (expression.this, expression.expression):
        operand.replace(exp.cast(operand, "TIMESTAMP", copy=True))

    unit = expression.args.get("unit") or "DAY"
    return self.func("DATE_DIFF", expression.this, expression.expression, unit)
197
198
def _unix_to_time_sql(self: BigQuery.Generator, expression: exp.UnixToTime) -> str:
    """Map UnixToTime onto TIMESTAMP_SECONDS / TIMESTAMP_MILLIS / TIMESTAMP_MICROS by scale."""
    scale = expression.args.get("scale")
    timestamp = self.sql(expression, "this")

    if scale in (None, exp.UnixToTime.SECONDS):
        suffix = "SECONDS"
    elif scale == exp.UnixToTime.MILLIS:
        suffix = "MILLIS"
    elif scale == exp.UnixToTime.MICROS:
        suffix = "MICROS"
    elif scale == exp.UnixToTime.NANOS:
        # We need to cast to INT64 because that's what BQ expects
        suffix = "MICROS"
        timestamp = f"CAST({timestamp} / 1000 AS INT64)"
    else:
        self.unsupported(f"Unsupported scale for timestamp: {scale}.")
        return ""

    return f"TIMESTAMP_{suffix}({timestamp})"
214
215
216class BigQuery(Dialect):
217    UNNEST_COLUMN_ONLY = True
218    SUPPORTS_USER_DEFINED_TYPES = False
219    SUPPORTS_SEMI_ANTI_JOIN = False
220    LOG_BASE_FIRST = False
221
222    # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity
223    RESOLVES_IDENTIFIERS_AS_UPPERCASE = None
224
225    # bigquery udfs are case sensitive
226    NORMALIZE_FUNCTIONS = False
227
228    TIME_MAPPING = {
229        "%D": "%m/%d/%y",
230    }
231
232    ESCAPE_SEQUENCES = {
233        "\\a": "\a",
234        "\\b": "\b",
235        "\\f": "\f",
236        "\\n": "\n",
237        "\\r": "\r",
238        "\\t": "\t",
239        "\\v": "\v",
240    }
241
242    FORMAT_MAPPING = {
243        "DD": "%d",
244        "MM": "%m",
245        "MON": "%b",
246        "MONTH": "%B",
247        "YYYY": "%Y",
248        "YY": "%y",
249        "HH": "%I",
250        "HH12": "%I",
251        "HH24": "%H",
252        "MI": "%M",
253        "SS": "%S",
254        "SSSSS": "%f",
255        "TZH": "%z",
256    }
257
258    # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement
259    # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table
260    PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE"}
261
262    @classmethod
263    def normalize_identifier(cls, expression: E) -> E:
264        if isinstance(expression, exp.Identifier):
265            parent = expression.parent
266            while isinstance(parent, exp.Dot):
267                parent = parent.parent
268
269            # In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least).
270            # The following check is essentially a heuristic to detect tables based on whether or
271            # not they're qualified. It also avoids normalizing UDFs, because they're case-sensitive.
272            if (
273                not isinstance(parent, exp.UserDefinedFunction)
274                and not (isinstance(parent, exp.Table) and parent.db)
275                and not expression.meta.get("is_table")
276            ):
277                expression.set("this", expression.this.lower())
278
279        return expression
280
281    class Tokenizer(tokens.Tokenizer):
282        QUOTES = ["'", '"', '"""', "'''"]
283        COMMENTS = ["--", "#", ("/*", "*/")]
284        IDENTIFIERS = ["`"]
285        STRING_ESCAPES = ["\\"]
286
287        HEX_STRINGS = [("0x", ""), ("0X", "")]
288
289        BYTE_STRINGS = [
290            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
291        ]
292
293        RAW_STRINGS = [
294            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
295        ]
296
297        KEYWORDS = {
298            **tokens.Tokenizer.KEYWORDS,
299            "ANY TYPE": TokenType.VARIANT,
300            "BEGIN": TokenType.COMMAND,
301            "BEGIN TRANSACTION": TokenType.BEGIN,
302            "BYTES": TokenType.BINARY,
303            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
304            "DECLARE": TokenType.COMMAND,
305            "FLOAT64": TokenType.DOUBLE,
306            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
307            "MODEL": TokenType.MODEL,
308            "NOT DETERMINISTIC": TokenType.VOLATILE,
309            "RECORD": TokenType.STRUCT,
310            "TIMESTAMP": TokenType.TIMESTAMPTZ,
311        }
312        KEYWORDS.pop("DIV")
313
314    class Parser(parser.Parser):
315        PREFIXED_PIVOT_COLUMNS = True
316
317        LOG_DEFAULTS_TO_LN = True
318
319        FUNCTIONS = {
320            **parser.Parser.FUNCTIONS,
321            "DATE": _parse_date,
322            "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
323            "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
324            "DATE_TRUNC": lambda args: exp.DateTrunc(
325                unit=exp.Literal.string(str(seq_get(args, 1))),
326                this=seq_get(args, 0),
327            ),
328            "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
329            "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
330            "DIV": binary_from_function(exp.IntDiv),
331            "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
332            "MD5": exp.MD5Digest.from_arg_list,
333            "TO_HEX": _parse_to_hex,
334            "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")(
335                [seq_get(args, 1), seq_get(args, 0)]
336            ),
337            "PARSE_TIMESTAMP": _parse_timestamp,
338            "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
339            "REGEXP_EXTRACT": lambda args: exp.RegexpExtract(
340                this=seq_get(args, 0),
341                expression=seq_get(args, 1),
342                position=seq_get(args, 2),
343                occurrence=seq_get(args, 3),
344                group=exp.Literal.number(1) if re.compile(args[1].name).groups == 1 else None,
345            ),
346            "SHA256": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(256)),
347            "SHA512": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(512)),
348            "SPLIT": lambda args: exp.Split(
349                # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
350                this=seq_get(args, 0),
351                expression=seq_get(args, 1) or exp.Literal.string(","),
352            ),
353            "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
354            "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
355            "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
356            "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
357            "TIMESTAMP_MICROS": lambda args: exp.UnixToTime(
358                this=seq_get(args, 0), scale=exp.UnixToTime.MICROS
359            ),
360            "TIMESTAMP_MILLIS": lambda args: exp.UnixToTime(
361                this=seq_get(args, 0), scale=exp.UnixToTime.MILLIS
362            ),
363            "TIMESTAMP_SECONDS": lambda args: exp.UnixToTime(
364                this=seq_get(args, 0), scale=exp.UnixToTime.SECONDS
365            ),
366            "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
367        }
368
369        FUNCTION_PARSERS = {
370            **parser.Parser.FUNCTION_PARSERS,
371            "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
372        }
373        FUNCTION_PARSERS.pop("TRIM")
374
375        NO_PAREN_FUNCTIONS = {
376            **parser.Parser.NO_PAREN_FUNCTIONS,
377            TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
378        }
379
380        NESTED_TYPE_TOKENS = {
381            *parser.Parser.NESTED_TYPE_TOKENS,
382            TokenType.TABLE,
383        }
384
385        ID_VAR_TOKENS = {
386            *parser.Parser.ID_VAR_TOKENS,
387            TokenType.VALUES,
388        }
389
390        PROPERTY_PARSERS = {
391            **parser.Parser.PROPERTY_PARSERS,
392            "NOT DETERMINISTIC": lambda self: self.expression(
393                exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
394            ),
395            "OPTIONS": lambda self: self._parse_with_property(),
396        }
397
398        CONSTRAINT_PARSERS = {
399            **parser.Parser.CONSTRAINT_PARSERS,
400            "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
401        }
402
403        RANGE_PARSERS = parser.Parser.RANGE_PARSERS.copy()
404        RANGE_PARSERS.pop(TokenType.OVERLAPS, None)
405
406        NULL_TOKENS = {TokenType.NULL, TokenType.UNKNOWN}
407
408        STATEMENT_PARSERS = {
409            **parser.Parser.STATEMENT_PARSERS,
410            TokenType.END: lambda self: self._parse_as_command(self._prev),
411            TokenType.FOR: lambda self: self._parse_for_in(),
412        }
413
414        def _parse_for_in(self) -> exp.ForIn:
415            this = self._parse_range()
416            self._match_text_seq("DO")
417            return self.expression(exp.ForIn, this=this, expression=self._parse_statement())
418
        def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
            """Parse one table-name segment, handling BigQuery's dashes and leading digits.

            Falls back to a number literal when no identifier matches, then glues
            dash-separated or number-adjacent tokens back into one identifier.
            """
            this = super()._parse_table_part(schema=schema) or self._parse_number()

            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
            if isinstance(this, exp.Identifier):
                table_name = this.name
                # Re-join `my-project` style names that the tokenizer split on DASH.
                while self._match(TokenType.DASH, advance=False) and self._next:
                    self._advance(2)
                    table_name += f"-{self._prev.text}"

                this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))
            elif isinstance(this, exp.Literal):
                table_name = this.name

                # A var token immediately adjacent to the number (no whitespace gap,
                # checked via token start/end offsets) continues the table name,
                # e.g. `123abc` tokenized as NUMBER + VAR.
                if (
                    self._curr
                    and self._prev.end == self._curr.start - 1
                    and self._parse_var(any_token=True)
                ):
                    table_name += self._prev.text

                this = exp.Identifier(this=table_name, quoted=True)

            return this
443
444        def _parse_table_parts(self, schema: bool = False) -> exp.Table:
445            table = super()._parse_table_parts(schema=schema)
446            if isinstance(table.this, exp.Identifier) and "." in table.name:
447                catalog, db, this, *rest = (
448                    t.cast(t.Optional[exp.Expression], exp.to_identifier(x))
449                    for x in split_num_words(table.name, ".", 3)
450                )
451
452                if rest and this:
453                    this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))
454
455                table = exp.Table(this=this, db=db, catalog=catalog)
456
457            return table
458
459        def _parse_json_object(self) -> exp.JSONObject:
460            json_object = super()._parse_json_object()
461            array_kv_pair = seq_get(json_object.expressions, 0)
462
463            # Converts BQ's "signature 2" of JSON_OBJECT into SQLGlot's canonical representation
464            # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2
465            if (
466                array_kv_pair
467                and isinstance(array_kv_pair.this, exp.Array)
468                and isinstance(array_kv_pair.expression, exp.Array)
469            ):
470                keys = array_kv_pair.this.expressions
471                values = array_kv_pair.expression.expressions
472
473                json_object.set(
474                    "expressions",
475                    [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)],
476                )
477
478            return json_object
479
480    class Generator(generator.Generator):
481        EXPLICIT_UNION = True
482        INTERVAL_ALLOWS_PLURAL_FORM = False
483        JOIN_HINTS = False
484        QUERY_HINTS = False
485        TABLE_HINTS = False
486        LIMIT_FETCH = "LIMIT"
487        RENAME_TABLE_WITH_DB = False
488        NVL2_SUPPORTED = False
489        UNNEST_WITH_ORDINALITY = False
490        COLLATE_IS_FUNC = True
491        LIMIT_ONLY_LITERALS = True
492
493        TRANSFORMS = {
494            **generator.Generator.TRANSFORMS,
495            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
496            exp.ArgMax: arg_max_or_min_no_count("MAX_BY"),
497            exp.ArgMin: arg_max_or_min_no_count("MIN_BY"),
498            exp.ArrayContains: _array_contains_sql,
499            exp.ArraySize: rename_func("ARRAY_LENGTH"),
500            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
501            exp.CollateProperty: lambda self, e: f"DEFAULT COLLATE {self.sql(e, 'this')}"
502            if e.args.get("default")
503            else f"COLLATE {self.sql(e, 'this')}",
504            exp.Create: _create_sql,
505            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
506            exp.DateAdd: date_add_interval_sql("DATE", "ADD"),
507            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
508            exp.DateFromParts: rename_func("DATE"),
509            exp.DateStrToDate: datestrtodate_sql,
510            exp.DateSub: date_add_interval_sql("DATE", "SUB"),
511            exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
512            exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
513            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
514            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
515            exp.GroupConcat: rename_func("STRING_AGG"),
516            exp.Hex: rename_func("TO_HEX"),
517            exp.If: if_sql(false_value="NULL"),
518            exp.ILike: no_ilike_sql,
519            exp.IntDiv: rename_func("DIV"),
520            exp.JSONFormat: rename_func("TO_JSON_STRING"),
521            exp.JSONKeyValue: json_keyvalue_comma_sql,
522            exp.Max: max_or_greatest,
523            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
524            exp.MD5Digest: rename_func("MD5"),
525            exp.Min: min_or_least,
526            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
527            exp.RegexpExtract: lambda self, e: self.func(
528                "REGEXP_EXTRACT",
529                e.this,
530                e.expression,
531                e.args.get("position"),
532                e.args.get("occurrence"),
533            ),
534            exp.RegexpReplace: regexp_replace_sql,
535            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
536            exp.ReturnsProperty: _returnsproperty_sql,
537            exp.Select: transforms.preprocess(
538                [
539                    transforms.explode_to_unnest(),
540                    _unqualify_unnest,
541                    transforms.eliminate_distinct_on,
542                    _alias_ordered_group,
543                    transforms.eliminate_semi_and_anti_joins,
544                ]
545            ),
546            exp.SHA2: lambda self, e: self.func(
547                f"SHA256" if e.text("length") == "256" else "SHA512", e.this
548            ),
549            exp.StabilityProperty: lambda self, e: f"DETERMINISTIC"
550            if e.name == "IMMUTABLE"
551            else "NOT DETERMINISTIC",
552            exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
553            exp.StrToTime: lambda self, e: self.func(
554                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
555            ),
556            exp.TimeAdd: date_add_interval_sql("TIME", "ADD"),
557            exp.TimeSub: date_add_interval_sql("TIME", "SUB"),
558            exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"),
559            exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"),
560            exp.TimeStrToTime: timestrtotime_sql,
561            exp.TimeToStr: lambda self, e: f"FORMAT_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
562            exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
563            exp.TsOrDsAdd: _ts_or_ds_add_sql,
564            exp.TsOrDsDiff: _ts_or_ds_diff_sql,
565            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
566            exp.Unhex: rename_func("FROM_HEX"),
567            exp.UnixToTime: _unix_to_time_sql,
568            exp.Values: _derived_table_values_to_unnest,
569            exp.VariancePop: rename_func("VAR_POP"),
570        }
571
572        TYPE_MAPPING = {
573            **generator.Generator.TYPE_MAPPING,
574            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
575            exp.DataType.Type.BIGINT: "INT64",
576            exp.DataType.Type.BINARY: "BYTES",
577            exp.DataType.Type.BOOLEAN: "BOOL",
578            exp.DataType.Type.CHAR: "STRING",
579            exp.DataType.Type.DECIMAL: "NUMERIC",
580            exp.DataType.Type.DOUBLE: "FLOAT64",
581            exp.DataType.Type.FLOAT: "FLOAT64",
582            exp.DataType.Type.INT: "INT64",
583            exp.DataType.Type.NCHAR: "STRING",
584            exp.DataType.Type.NVARCHAR: "STRING",
585            exp.DataType.Type.SMALLINT: "INT64",
586            exp.DataType.Type.TEXT: "STRING",
587            exp.DataType.Type.TIMESTAMP: "DATETIME",
588            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
589            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
590            exp.DataType.Type.TINYINT: "INT64",
591            exp.DataType.Type.VARBINARY: "BYTES",
592            exp.DataType.Type.VARCHAR: "STRING",
593            exp.DataType.Type.VARIANT: "ANY TYPE",
594        }
595
596        PROPERTIES_LOCATION = {
597            **generator.Generator.PROPERTIES_LOCATION,
598            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
599            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
600        }
601
602        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
603        RESERVED_KEYWORDS = {
604            *generator.Generator.RESERVED_KEYWORDS,
605            "all",
606            "and",
607            "any",
608            "array",
609            "as",
610            "asc",
611            "assert_rows_modified",
612            "at",
613            "between",
614            "by",
615            "case",
616            "cast",
617            "collate",
618            "contains",
619            "create",
620            "cross",
621            "cube",
622            "current",
623            "default",
624            "define",
625            "desc",
626            "distinct",
627            "else",
628            "end",
629            "enum",
630            "escape",
631            "except",
632            "exclude",
633            "exists",
634            "extract",
635            "false",
636            "fetch",
637            "following",
638            "for",
639            "from",
640            "full",
641            "group",
642            "grouping",
643            "groups",
644            "hash",
645            "having",
646            "if",
647            "ignore",
648            "in",
649            "inner",
650            "intersect",
651            "interval",
652            "into",
653            "is",
654            "join",
655            "lateral",
656            "left",
657            "like",
658            "limit",
659            "lookup",
660            "merge",
661            "natural",
662            "new",
663            "no",
664            "not",
665            "null",
666            "nulls",
667            "of",
668            "on",
669            "or",
670            "order",
671            "outer",
672            "over",
673            "partition",
674            "preceding",
675            "proto",
676            "qualify",
677            "range",
678            "recursive",
679            "respect",
680            "right",
681            "rollup",
682            "rows",
683            "select",
684            "set",
685            "some",
686            "struct",
687            "tablesample",
688            "then",
689            "to",
690            "treat",
691            "true",
692            "unbounded",
693            "union",
694            "unnest",
695            "using",
696            "when",
697            "where",
698            "window",
699            "with",
700            "within",
701        }
702
703        def eq_sql(self, expression: exp.EQ) -> str:
704            # Operands of = cannot be NULL in BigQuery
705            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
706                return "NULL"
707
708            return self.binary(expression, "=")
709
710        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
711            parent = expression.parent
712
713            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
714            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
715            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
716                return self.func(
717                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
718                )
719
720            return super().attimezone_sql(expression)
721
        def trycast_sql(self, expression: exp.TryCast) -> str:
            # BigQuery spells TRY_CAST as SAFE_CAST; reuse cast_sql with the SAFE_ prefix.
            return self.cast_sql(expression, safe_prefix="SAFE_")
724
        def cte_sql(self, expression: exp.CTE) -> str:
            # BigQuery cannot express CTE column aliases; warn (best-effort) and
            # fall back to the default rendering.
            if expression.alias_column_names:
                self.unsupported("Column names in CTE definition are not supported.")
            return super().cte_sql(expression)
729
730        def array_sql(self, expression: exp.Array) -> str:
731            first_arg = seq_get(expression.expressions, 0)
732            if isinstance(first_arg, exp.Subqueryable):
733                return f"ARRAY{self.wrap(self.sql(first_arg))}"
734
735            return inline_array_sql(self, expression)
736
        def transaction_sql(self, *_) -> str:
            # BigQuery ignores transaction modifiers; always emit the fixed statements.
            return "BEGIN TRANSACTION"

        def commit_sql(self, *_) -> str:
            return "COMMIT TRANSACTION"

        def rollback_sql(self, *_) -> str:
            return "ROLLBACK TRANSACTION"
745
        def in_unnest_op(self, expression: exp.Unnest) -> str:
            # BigQuery does not parenthesize UNNEST on the right-hand side of IN.
            return self.sql(expression)
748
749        def except_op(self, expression: exp.Except) -> str:
750            if not expression.args.get("distinct", False):
751                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
752            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
753
754        def intersect_op(self, expression: exp.Intersect) -> str:
755            if not expression.args.get("distinct", False):
756                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
757            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
758
        def with_properties(self, properties: exp.Properties) -> str:
            # BigQuery uses OPTIONS (...) where other dialects use WITH (...).
            return self.properties(properties, prefix=self.seg("OPTIONS"))
761
        def version_sql(self, expression: exp.Version) -> str:
            # BigQuery spells `FOR TIMESTAMP AS OF` as `FOR SYSTEM_TIME AS OF`.
            if expression.name == "TIMESTAMP":
                expression.set("this", "SYSTEM_TIME")
            return super().version_sql(expression)
logger = <Logger sqlglot (WARNING)>
class BigQuery(sqlglot.dialects.dialect.Dialect):
217class BigQuery(Dialect):
218    UNNEST_COLUMN_ONLY = True
219    SUPPORTS_USER_DEFINED_TYPES = False
220    SUPPORTS_SEMI_ANTI_JOIN = False
221    LOG_BASE_FIRST = False
222
223    # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#case_sensitivity
224    RESOLVES_IDENTIFIERS_AS_UPPERCASE = None
225
226    # bigquery udfs are case sensitive
227    NORMALIZE_FUNCTIONS = False
228
229    TIME_MAPPING = {
230        "%D": "%m/%d/%y",
231    }
232
233    ESCAPE_SEQUENCES = {
234        "\\a": "\a",
235        "\\b": "\b",
236        "\\f": "\f",
237        "\\n": "\n",
238        "\\r": "\r",
239        "\\t": "\t",
240        "\\v": "\v",
241    }
242
243    FORMAT_MAPPING = {
244        "DD": "%d",
245        "MM": "%m",
246        "MON": "%b",
247        "MONTH": "%B",
248        "YYYY": "%Y",
249        "YY": "%y",
250        "HH": "%I",
251        "HH12": "%I",
252        "HH24": "%H",
253        "MI": "%M",
254        "SS": "%S",
255        "SSSSS": "%f",
256        "TZH": "%z",
257    }
258
259    # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement
260    # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table
261    PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE"}
262
263    @classmethod
264    def normalize_identifier(cls, expression: E) -> E:
265        if isinstance(expression, exp.Identifier):
266            parent = expression.parent
267            while isinstance(parent, exp.Dot):
268                parent = parent.parent
269
270            # In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least).
271            # The following check is essentially a heuristic to detect tables based on whether or
272            # not they're qualified. It also avoids normalizing UDFs, because they're case-sensitive.
273            if (
274                not isinstance(parent, exp.UserDefinedFunction)
275                and not (isinstance(parent, exp.Table) and parent.db)
276                and not expression.meta.get("is_table")
277            ):
278                expression.set("this", expression.this.lower())
279
280        return expression
281
282    class Tokenizer(tokens.Tokenizer):
283        QUOTES = ["'", '"', '"""', "'''"]
284        COMMENTS = ["--", "#", ("/*", "*/")]
285        IDENTIFIERS = ["`"]
286        STRING_ESCAPES = ["\\"]
287
288        HEX_STRINGS = [("0x", ""), ("0X", "")]
289
290        BYTE_STRINGS = [
291            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
292        ]
293
294        RAW_STRINGS = [
295            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
296        ]
297
298        KEYWORDS = {
299            **tokens.Tokenizer.KEYWORDS,
300            "ANY TYPE": TokenType.VARIANT,
301            "BEGIN": TokenType.COMMAND,
302            "BEGIN TRANSACTION": TokenType.BEGIN,
303            "BYTES": TokenType.BINARY,
304            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
305            "DECLARE": TokenType.COMMAND,
306            "FLOAT64": TokenType.DOUBLE,
307            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
308            "MODEL": TokenType.MODEL,
309            "NOT DETERMINISTIC": TokenType.VOLATILE,
310            "RECORD": TokenType.STRUCT,
311            "TIMESTAMP": TokenType.TIMESTAMPTZ,
312        }
313        KEYWORDS.pop("DIV")
314
315    class Parser(parser.Parser):
316        PREFIXED_PIVOT_COLUMNS = True
317
318        LOG_DEFAULTS_TO_LN = True
319
320        FUNCTIONS = {
321            **parser.Parser.FUNCTIONS,
322            "DATE": _parse_date,
323            "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
324            "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
325            "DATE_TRUNC": lambda args: exp.DateTrunc(
326                unit=exp.Literal.string(str(seq_get(args, 1))),
327                this=seq_get(args, 0),
328            ),
329            "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
330            "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
331            "DIV": binary_from_function(exp.IntDiv),
332            "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
333            "MD5": exp.MD5Digest.from_arg_list,
334            "TO_HEX": _parse_to_hex,
335            "PARSE_DATE": lambda args: format_time_lambda(exp.StrToDate, "bigquery")(
336                [seq_get(args, 1), seq_get(args, 0)]
337            ),
338            "PARSE_TIMESTAMP": _parse_timestamp,
339            "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
340            "REGEXP_EXTRACT": lambda args: exp.RegexpExtract(
341                this=seq_get(args, 0),
342                expression=seq_get(args, 1),
343                position=seq_get(args, 2),
344                occurrence=seq_get(args, 3),
345                group=exp.Literal.number(1) if re.compile(args[1].name).groups == 1 else None,
346            ),
347            "SHA256": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(256)),
348            "SHA512": lambda args: exp.SHA2(this=seq_get(args, 0), length=exp.Literal.number(512)),
349            "SPLIT": lambda args: exp.Split(
350                # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
351                this=seq_get(args, 0),
352                expression=seq_get(args, 1) or exp.Literal.string(","),
353            ),
354            "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
355            "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
356            "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
357            "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
358            "TIMESTAMP_MICROS": lambda args: exp.UnixToTime(
359                this=seq_get(args, 0), scale=exp.UnixToTime.MICROS
360            ),
361            "TIMESTAMP_MILLIS": lambda args: exp.UnixToTime(
362                this=seq_get(args, 0), scale=exp.UnixToTime.MILLIS
363            ),
364            "TIMESTAMP_SECONDS": lambda args: exp.UnixToTime(
365                this=seq_get(args, 0), scale=exp.UnixToTime.SECONDS
366            ),
367            "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
368        }
369
370        FUNCTION_PARSERS = {
371            **parser.Parser.FUNCTION_PARSERS,
372            "ARRAY": lambda self: self.expression(exp.Array, expressions=[self._parse_statement()]),
373        }
374        FUNCTION_PARSERS.pop("TRIM")
375
376        NO_PAREN_FUNCTIONS = {
377            **parser.Parser.NO_PAREN_FUNCTIONS,
378            TokenType.CURRENT_DATETIME: exp.CurrentDatetime,
379        }
380
381        NESTED_TYPE_TOKENS = {
382            *parser.Parser.NESTED_TYPE_TOKENS,
383            TokenType.TABLE,
384        }
385
386        ID_VAR_TOKENS = {
387            *parser.Parser.ID_VAR_TOKENS,
388            TokenType.VALUES,
389        }
390
391        PROPERTY_PARSERS = {
392            **parser.Parser.PROPERTY_PARSERS,
393            "NOT DETERMINISTIC": lambda self: self.expression(
394                exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
395            ),
396            "OPTIONS": lambda self: self._parse_with_property(),
397        }
398
399        CONSTRAINT_PARSERS = {
400            **parser.Parser.CONSTRAINT_PARSERS,
401            "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
402        }
403
404        RANGE_PARSERS = parser.Parser.RANGE_PARSERS.copy()
405        RANGE_PARSERS.pop(TokenType.OVERLAPS, None)
406
407        NULL_TOKENS = {TokenType.NULL, TokenType.UNKNOWN}
408
409        STATEMENT_PARSERS = {
410            **parser.Parser.STATEMENT_PARSERS,
411            TokenType.END: lambda self: self._parse_as_command(self._prev),
412            TokenType.FOR: lambda self: self._parse_for_in(),
413        }
414
415        def _parse_for_in(self) -> exp.ForIn:
416            this = self._parse_range()
417            self._match_text_seq("DO")
418            return self.expression(exp.ForIn, this=this, expression=self._parse_statement())
419
420        def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
421            this = super()._parse_table_part(schema=schema) or self._parse_number()
422
423            # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
424            if isinstance(this, exp.Identifier):
425                table_name = this.name
426                while self._match(TokenType.DASH, advance=False) and self._next:
427                    self._advance(2)
428                    table_name += f"-{self._prev.text}"
429
430                this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))
431            elif isinstance(this, exp.Literal):
432                table_name = this.name
433
434                if (
435                    self._curr
436                    and self._prev.end == self._curr.start - 1
437                    and self._parse_var(any_token=True)
438                ):
439                    table_name += self._prev.text
440
441                this = exp.Identifier(this=table_name, quoted=True)
442
443            return this
444
445        def _parse_table_parts(self, schema: bool = False) -> exp.Table:
446            table = super()._parse_table_parts(schema=schema)
447            if isinstance(table.this, exp.Identifier) and "." in table.name:
448                catalog, db, this, *rest = (
449                    t.cast(t.Optional[exp.Expression], exp.to_identifier(x))
450                    for x in split_num_words(table.name, ".", 3)
451                )
452
453                if rest and this:
454                    this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))
455
456                table = exp.Table(this=this, db=db, catalog=catalog)
457
458            return table
459
460        def _parse_json_object(self) -> exp.JSONObject:
461            json_object = super()._parse_json_object()
462            array_kv_pair = seq_get(json_object.expressions, 0)
463
464            # Converts BQ's "signature 2" of JSON_OBJECT into SQLGlot's canonical representation
465            # https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2
466            if (
467                array_kv_pair
468                and isinstance(array_kv_pair.this, exp.Array)
469                and isinstance(array_kv_pair.expression, exp.Array)
470            ):
471                keys = array_kv_pair.this.expressions
472                values = array_kv_pair.expression.expressions
473
474                json_object.set(
475                    "expressions",
476                    [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)],
477                )
478
479            return json_object
480
481    class Generator(generator.Generator):
482        EXPLICIT_UNION = True
483        INTERVAL_ALLOWS_PLURAL_FORM = False
484        JOIN_HINTS = False
485        QUERY_HINTS = False
486        TABLE_HINTS = False
487        LIMIT_FETCH = "LIMIT"
488        RENAME_TABLE_WITH_DB = False
489        NVL2_SUPPORTED = False
490        UNNEST_WITH_ORDINALITY = False
491        COLLATE_IS_FUNC = True
492        LIMIT_ONLY_LITERALS = True
493
494        TRANSFORMS = {
495            **generator.Generator.TRANSFORMS,
496            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
497            exp.ArgMax: arg_max_or_min_no_count("MAX_BY"),
498            exp.ArgMin: arg_max_or_min_no_count("MIN_BY"),
499            exp.ArrayContains: _array_contains_sql,
500            exp.ArraySize: rename_func("ARRAY_LENGTH"),
501            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
502            exp.CollateProperty: lambda self, e: f"DEFAULT COLLATE {self.sql(e, 'this')}"
503            if e.args.get("default")
504            else f"COLLATE {self.sql(e, 'this')}",
505            exp.Create: _create_sql,
506            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
507            exp.DateAdd: date_add_interval_sql("DATE", "ADD"),
508            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
509            exp.DateFromParts: rename_func("DATE"),
510            exp.DateStrToDate: datestrtodate_sql,
511            exp.DateSub: date_add_interval_sql("DATE", "SUB"),
512            exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
513            exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
514            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
515            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
516            exp.GroupConcat: rename_func("STRING_AGG"),
517            exp.Hex: rename_func("TO_HEX"),
518            exp.If: if_sql(false_value="NULL"),
519            exp.ILike: no_ilike_sql,
520            exp.IntDiv: rename_func("DIV"),
521            exp.JSONFormat: rename_func("TO_JSON_STRING"),
522            exp.JSONKeyValue: json_keyvalue_comma_sql,
523            exp.Max: max_or_greatest,
524            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),
525            exp.MD5Digest: rename_func("MD5"),
526            exp.Min: min_or_least,
527            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
528            exp.RegexpExtract: lambda self, e: self.func(
529                "REGEXP_EXTRACT",
530                e.this,
531                e.expression,
532                e.args.get("position"),
533                e.args.get("occurrence"),
534            ),
535            exp.RegexpReplace: regexp_replace_sql,
536            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
537            exp.ReturnsProperty: _returnsproperty_sql,
538            exp.Select: transforms.preprocess(
539                [
540                    transforms.explode_to_unnest(),
541                    _unqualify_unnest,
542                    transforms.eliminate_distinct_on,
543                    _alias_ordered_group,
544                    transforms.eliminate_semi_and_anti_joins,
545                ]
546            ),
547            exp.SHA2: lambda self, e: self.func(
548                f"SHA256" if e.text("length") == "256" else "SHA512", e.this
549            ),
550            exp.StabilityProperty: lambda self, e: f"DETERMINISTIC"
551            if e.name == "IMMUTABLE"
552            else "NOT DETERMINISTIC",
553            exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
554            exp.StrToTime: lambda self, e: self.func(
555                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
556            ),
557            exp.TimeAdd: date_add_interval_sql("TIME", "ADD"),
558            exp.TimeSub: date_add_interval_sql("TIME", "SUB"),
559            exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"),
560            exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"),
561            exp.TimeStrToTime: timestrtotime_sql,
562            exp.TimeToStr: lambda self, e: f"FORMAT_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
563            exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
564            exp.TsOrDsAdd: _ts_or_ds_add_sql,
565            exp.TsOrDsDiff: _ts_or_ds_diff_sql,
566            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
567            exp.Unhex: rename_func("FROM_HEX"),
568            exp.UnixToTime: _unix_to_time_sql,
569            exp.Values: _derived_table_values_to_unnest,
570            exp.VariancePop: rename_func("VAR_POP"),
571        }
572
573        TYPE_MAPPING = {
574            **generator.Generator.TYPE_MAPPING,
575            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
576            exp.DataType.Type.BIGINT: "INT64",
577            exp.DataType.Type.BINARY: "BYTES",
578            exp.DataType.Type.BOOLEAN: "BOOL",
579            exp.DataType.Type.CHAR: "STRING",
580            exp.DataType.Type.DECIMAL: "NUMERIC",
581            exp.DataType.Type.DOUBLE: "FLOAT64",
582            exp.DataType.Type.FLOAT: "FLOAT64",
583            exp.DataType.Type.INT: "INT64",
584            exp.DataType.Type.NCHAR: "STRING",
585            exp.DataType.Type.NVARCHAR: "STRING",
586            exp.DataType.Type.SMALLINT: "INT64",
587            exp.DataType.Type.TEXT: "STRING",
588            exp.DataType.Type.TIMESTAMP: "DATETIME",
589            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
590            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
591            exp.DataType.Type.TINYINT: "INT64",
592            exp.DataType.Type.VARBINARY: "BYTES",
593            exp.DataType.Type.VARCHAR: "STRING",
594            exp.DataType.Type.VARIANT: "ANY TYPE",
595        }
596
597        PROPERTIES_LOCATION = {
598            **generator.Generator.PROPERTIES_LOCATION,
599            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
600            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
601        }
602
603        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
604        RESERVED_KEYWORDS = {
605            *generator.Generator.RESERVED_KEYWORDS,
606            "all",
607            "and",
608            "any",
609            "array",
610            "as",
611            "asc",
612            "assert_rows_modified",
613            "at",
614            "between",
615            "by",
616            "case",
617            "cast",
618            "collate",
619            "contains",
620            "create",
621            "cross",
622            "cube",
623            "current",
624            "default",
625            "define",
626            "desc",
627            "distinct",
628            "else",
629            "end",
630            "enum",
631            "escape",
632            "except",
633            "exclude",
634            "exists",
635            "extract",
636            "false",
637            "fetch",
638            "following",
639            "for",
640            "from",
641            "full",
642            "group",
643            "grouping",
644            "groups",
645            "hash",
646            "having",
647            "if",
648            "ignore",
649            "in",
650            "inner",
651            "intersect",
652            "interval",
653            "into",
654            "is",
655            "join",
656            "lateral",
657            "left",
658            "like",
659            "limit",
660            "lookup",
661            "merge",
662            "natural",
663            "new",
664            "no",
665            "not",
666            "null",
667            "nulls",
668            "of",
669            "on",
670            "or",
671            "order",
672            "outer",
673            "over",
674            "partition",
675            "preceding",
676            "proto",
677            "qualify",
678            "range",
679            "recursive",
680            "respect",
681            "right",
682            "rollup",
683            "rows",
684            "select",
685            "set",
686            "some",
687            "struct",
688            "tablesample",
689            "then",
690            "to",
691            "treat",
692            "true",
693            "unbounded",
694            "union",
695            "unnest",
696            "using",
697            "when",
698            "where",
699            "window",
700            "with",
701            "within",
702        }
703
704        def eq_sql(self, expression: exp.EQ) -> str:
705            # Operands of = cannot be NULL in BigQuery
706            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
707                return "NULL"
708
709            return self.binary(expression, "=")
710
711        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
712            parent = expression.parent
713
714            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
715            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
716            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
717                return self.func(
718                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
719                )
720
721            return super().attimezone_sql(expression)
722
723        def trycast_sql(self, expression: exp.TryCast) -> str:
724            return self.cast_sql(expression, safe_prefix="SAFE_")
725
726        def cte_sql(self, expression: exp.CTE) -> str:
727            if expression.alias_column_names:
728                self.unsupported("Column names in CTE definition are not supported.")
729            return super().cte_sql(expression)
730
731        def array_sql(self, expression: exp.Array) -> str:
732            first_arg = seq_get(expression.expressions, 0)
733            if isinstance(first_arg, exp.Subqueryable):
734                return f"ARRAY{self.wrap(self.sql(first_arg))}"
735
736            return inline_array_sql(self, expression)
737
738        def transaction_sql(self, *_) -> str:
739            return "BEGIN TRANSACTION"
740
741        def commit_sql(self, *_) -> str:
742            return "COMMIT TRANSACTION"
743
744        def rollback_sql(self, *_) -> str:
745            return "ROLLBACK TRANSACTION"
746
747        def in_unnest_op(self, expression: exp.Unnest) -> str:
748            return self.sql(expression)
749
750        def except_op(self, expression: exp.Except) -> str:
751            if not expression.args.get("distinct", False):
752                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
753            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
754
755        def intersect_op(self, expression: exp.Intersect) -> str:
756            if not expression.args.get("distinct", False):
757                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
758            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
759
760        def with_properties(self, properties: exp.Properties) -> str:
761            return self.properties(properties, prefix=self.seg("OPTIONS"))
762
763        def version_sql(self, expression: exp.Version) -> str:
764            if expression.name == "TIMESTAMP":
765                expression.set("this", "SYSTEM_TIME")
766            return super().version_sql(expression)
UNNEST_COLUMN_ONLY = True
SUPPORTS_USER_DEFINED_TYPES = False
SUPPORTS_SEMI_ANTI_JOIN = False
LOG_BASE_FIRST = False
RESOLVES_IDENTIFIERS_AS_UPPERCASE: Optional[bool] = None
NORMALIZE_FUNCTIONS: bool | str = False
TIME_MAPPING: Dict[str, str] = {'%D': '%m/%d/%y'}
ESCAPE_SEQUENCES: Dict[str, str] = {'\\a': '\x07', '\\b': '\x08', '\\f': '\x0c', '\\n': '\n', '\\r': '\r', '\\t': '\t', '\\v': '\x0b'}
FORMAT_MAPPING: Dict[str, str] = {'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
PSEUDOCOLUMNS: Set[str] = {'_PARTITIONTIME', '_PARTITIONDATE'}
@classmethod
def normalize_identifier(cls, expression: ~E) -> ~E:
263    @classmethod
264    def normalize_identifier(cls, expression: E) -> E:
265        if isinstance(expression, exp.Identifier):
266            parent = expression.parent
267            while isinstance(parent, exp.Dot):
268                parent = parent.parent
269
270            # In BigQuery, CTEs aren't case-sensitive, but table names are (by default, at least).
271            # The following check is essentially a heuristic to detect tables based on whether or
272            # not they're qualified. It also avoids normalizing UDFs, because they're case-sensitive.
273            if (
274                not isinstance(parent, exp.UserDefinedFunction)
275                and not (isinstance(parent, exp.Table) and parent.db)
276                and not expression.meta.get("is_table")
277            ):
278                expression.set("this", expression.this.lower())
279
280        return expression

Normalizes an unquoted identifier to either lower or upper case, thus essentially making it case-insensitive. If a dialect treats all identifiers as case-insensitive, they will be normalized to lowercase regardless of whether they are quoted.

tokenizer_class = <class 'BigQuery.Tokenizer'>
parser_class = <class 'BigQuery.Parser'>
generator_class = <class 'BigQuery.Generator'>
TIME_TRIE: Dict = {'%': {'D': {0: True}}}
FORMAT_TRIE: Dict = {'D': {'D': {0: True}}, 'M': {'M': {0: True}, 'O': {'N': {0: True, 'T': {'H': {0: True}}}}, 'I': {0: True}}, 'Y': {'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'H': {'H': {0: True, '1': {'2': {0: True}}, '2': {'4': {0: True}}}}, 'S': {'S': {0: True, 'S': {'S': {'S': {0: True}}}}}, 'T': {'Z': {'H': {0: True}}}}
INVERSE_TIME_MAPPING: Dict[str, str] = {'%m/%d/%y': '%D'}
INVERSE_TIME_TRIE: Dict = {'%': {'m': {'/': {'%': {'d': {'/': {'%': {'y': {0: True}}}}}}}}}
INVERSE_ESCAPE_SEQUENCES: Dict[str, str] = {'\x07': '\\a', '\x08': '\\b', '\x0c': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', '\x0b': '\\v'}
QUOTE_START = "'"
QUOTE_END = "'"
IDENTIFIER_START = '`'
IDENTIFIER_END = '`'
BIT_START = None
BIT_END = None
HEX_START = '0x'
HEX_END = ''
BYTE_START = "b'"
BYTE_END = "'"
class BigQuery.Tokenizer(sqlglot.tokens.Tokenizer):
282    class Tokenizer(tokens.Tokenizer):
283        QUOTES = ["'", '"', '"""', "'''"]
284        COMMENTS = ["--", "#", ("/*", "*/")]
285        IDENTIFIERS = ["`"]
286        STRING_ESCAPES = ["\\"]
287
288        HEX_STRINGS = [("0x", ""), ("0X", "")]
289
290        BYTE_STRINGS = [
291            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("b", "B")
292        ]
293
294        RAW_STRINGS = [
295            (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
296        ]
297
298        KEYWORDS = {
299            **tokens.Tokenizer.KEYWORDS,
300            "ANY TYPE": TokenType.VARIANT,
301            "BEGIN": TokenType.COMMAND,
302            "BEGIN TRANSACTION": TokenType.BEGIN,
303            "BYTES": TokenType.BINARY,
304            "CURRENT_DATETIME": TokenType.CURRENT_DATETIME,
305            "DECLARE": TokenType.COMMAND,
306            "FLOAT64": TokenType.DOUBLE,
307            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
308            "MODEL": TokenType.MODEL,
309            "NOT DETERMINISTIC": TokenType.VOLATILE,
310            "RECORD": TokenType.STRUCT,
311            "TIMESTAMP": TokenType.TIMESTAMPTZ,
312        }
313        KEYWORDS.pop("DIV")
QUOTES = ["'", '"', '"""', "'''"]
COMMENTS = ['--', '#', ('/*', '*/')]
IDENTIFIERS = ['`']
STRING_ESCAPES = ['\\']
HEX_STRINGS = [('0x', ''), ('0X', '')]
BYTE_STRINGS = [("b'", "'"), ("B'", "'"), ('b"', '"'), ('B"', '"'), ('b"""', '"""'), ('B"""', '"""'), ("b'''", "'''"), ("B'''", "'''")]
RAW_STRINGS = [("r'", "'"), ("R'", "'"), ('r"', '"'), ('R"', '"'), ('r"""', '"""'), ('R"""', '"""'), ("r'''", "'''"), ("R'''", "'''")]
KEYWORDS = {'{%': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{%-': <TokenType.BLOCK_START: 'BLOCK_START'>, '%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '+%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-%}': <TokenType.BLOCK_END: 'BLOCK_END'>, '{{+': <TokenType.BLOCK_START: 'BLOCK_START'>, '{{-': <TokenType.BLOCK_START: 'BLOCK_START'>, '+}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '-}}': <TokenType.BLOCK_END: 'BLOCK_END'>, '/*+': <TokenType.HINT: 'HINT'>, '==': <TokenType.EQ: 'EQ'>, '::': <TokenType.DCOLON: 'DCOLON'>, '||': <TokenType.DPIPE: 'DPIPE'>, '>=': <TokenType.GTE: 'GTE'>, '<=': <TokenType.LTE: 'LTE'>, '<>': <TokenType.NEQ: 'NEQ'>, '!=': <TokenType.NEQ: 'NEQ'>, ':=': <TokenType.COLON_EQ: 'COLON_EQ'>, '<=>': <TokenType.NULLSAFE_EQ: 'NULLSAFE_EQ'>, '->': <TokenType.ARROW: 'ARROW'>, '->>': <TokenType.DARROW: 'DARROW'>, '=>': <TokenType.FARROW: 'FARROW'>, '#>': <TokenType.HASH_ARROW: 'HASH_ARROW'>, '#>>': <TokenType.DHASH_ARROW: 'DHASH_ARROW'>, '<->': <TokenType.LR_ARROW: 'LR_ARROW'>, '&&': <TokenType.DAMP: 'DAMP'>, '??': <TokenType.DQMARK: 'DQMARK'>, 'ALL': <TokenType.ALL: 'ALL'>, 'ALWAYS': <TokenType.ALWAYS: 'ALWAYS'>, 'AND': <TokenType.AND: 'AND'>, 'ANTI': <TokenType.ANTI: 'ANTI'>, 'ANY': <TokenType.ANY: 'ANY'>, 'ASC': <TokenType.ASC: 'ASC'>, 'AS': <TokenType.ALIAS: 'ALIAS'>, 'ASOF': <TokenType.ASOF: 'ASOF'>, 'AUTOINCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'AUTO_INCREMENT': <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, 'BEGIN': <TokenType.COMMAND: 'COMMAND'>, 'BETWEEN': <TokenType.BETWEEN: 'BETWEEN'>, 'CACHE': <TokenType.CACHE: 'CACHE'>, 'UNCACHE': <TokenType.UNCACHE: 'UNCACHE'>, 'CASE': <TokenType.CASE: 'CASE'>, 'CHARACTER SET': <TokenType.CHARACTER_SET: 'CHARACTER_SET'>, 'CLUSTER BY': <TokenType.CLUSTER_BY: 'CLUSTER_BY'>, 'COLLATE': <TokenType.COLLATE: 'COLLATE'>, 'COLUMN': <TokenType.COLUMN: 'COLUMN'>, 'COMMIT': <TokenType.COMMIT: 'COMMIT'>, 'CONNECT BY': <TokenType.CONNECT_BY: 'CONNECT_BY'>, 'CONSTRAINT': 
<TokenType.CONSTRAINT: 'CONSTRAINT'>, 'CREATE': <TokenType.CREATE: 'CREATE'>, 'CROSS': <TokenType.CROSS: 'CROSS'>, 'CUBE': <TokenType.CUBE: 'CUBE'>, 'CURRENT_DATE': <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, 'CURRENT_TIME': <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, 'CURRENT_TIMESTAMP': <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, 'CURRENT_USER': <TokenType.CURRENT_USER: 'CURRENT_USER'>, 'DATABASE': <TokenType.DATABASE: 'DATABASE'>, 'DEFAULT': <TokenType.DEFAULT: 'DEFAULT'>, 'DELETE': <TokenType.DELETE: 'DELETE'>, 'DESC': <TokenType.DESC: 'DESC'>, 'DESCRIBE': <TokenType.DESCRIBE: 'DESCRIBE'>, 'DISTINCT': <TokenType.DISTINCT: 'DISTINCT'>, 'DISTRIBUTE BY': <TokenType.DISTRIBUTE_BY: 'DISTRIBUTE_BY'>, 'DROP': <TokenType.DROP: 'DROP'>, 'ELSE': <TokenType.ELSE: 'ELSE'>, 'END': <TokenType.END: 'END'>, 'ESCAPE': <TokenType.ESCAPE: 'ESCAPE'>, 'EXCEPT': <TokenType.EXCEPT: 'EXCEPT'>, 'EXECUTE': <TokenType.EXECUTE: 'EXECUTE'>, 'EXISTS': <TokenType.EXISTS: 'EXISTS'>, 'FALSE': <TokenType.FALSE: 'FALSE'>, 'FETCH': <TokenType.FETCH: 'FETCH'>, 'FILTER': <TokenType.FILTER: 'FILTER'>, 'FIRST': <TokenType.FIRST: 'FIRST'>, 'FULL': <TokenType.FULL: 'FULL'>, 'FUNCTION': <TokenType.FUNCTION: 'FUNCTION'>, 'FOR': <TokenType.FOR: 'FOR'>, 'FOREIGN KEY': <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, 'FORMAT': <TokenType.FORMAT: 'FORMAT'>, 'FROM': <TokenType.FROM: 'FROM'>, 'GEOGRAPHY': <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, 'GEOMETRY': <TokenType.GEOMETRY: 'GEOMETRY'>, 'GLOB': <TokenType.GLOB: 'GLOB'>, 'GROUP BY': <TokenType.GROUP_BY: 'GROUP_BY'>, 'GROUPING SETS': <TokenType.GROUPING_SETS: 'GROUPING_SETS'>, 'HAVING': <TokenType.HAVING: 'HAVING'>, 'ILIKE': <TokenType.ILIKE: 'ILIKE'>, 'IN': <TokenType.IN: 'IN'>, 'INDEX': <TokenType.INDEX: 'INDEX'>, 'INET': <TokenType.INET: 'INET'>, 'INNER': <TokenType.INNER: 'INNER'>, 'INSERT': <TokenType.INSERT: 'INSERT'>, 'INTERVAL': <TokenType.INTERVAL: 'INTERVAL'>, 'INTERSECT': <TokenType.INTERSECT: 'INTERSECT'>, 'INTO': <TokenType.INTO: 'INTO'>, 'IS': 
<TokenType.IS: 'IS'>, 'ISNULL': <TokenType.ISNULL: 'ISNULL'>, 'JOIN': <TokenType.JOIN: 'JOIN'>, 'KEEP': <TokenType.KEEP: 'KEEP'>, 'KILL': <TokenType.KILL: 'KILL'>, 'LATERAL': <TokenType.LATERAL: 'LATERAL'>, 'LEFT': <TokenType.LEFT: 'LEFT'>, 'LIKE': <TokenType.LIKE: 'LIKE'>, 'LIMIT': <TokenType.LIMIT: 'LIMIT'>, 'LOAD': <TokenType.LOAD: 'LOAD'>, 'LOCK': <TokenType.LOCK: 'LOCK'>, 'MERGE': <TokenType.MERGE: 'MERGE'>, 'NATURAL': <TokenType.NATURAL: 'NATURAL'>, 'NEXT': <TokenType.NEXT: 'NEXT'>, 'NOT': <TokenType.NOT: 'NOT'>, 'NOTNULL': <TokenType.NOTNULL: 'NOTNULL'>, 'NULL': <TokenType.NULL: 'NULL'>, 'OBJECT': <TokenType.OBJECT: 'OBJECT'>, 'OFFSET': <TokenType.OFFSET: 'OFFSET'>, 'ON': <TokenType.ON: 'ON'>, 'OR': <TokenType.OR: 'OR'>, 'XOR': <TokenType.XOR: 'XOR'>, 'ORDER BY': <TokenType.ORDER_BY: 'ORDER_BY'>, 'ORDINALITY': <TokenType.ORDINALITY: 'ORDINALITY'>, 'OUTER': <TokenType.OUTER: 'OUTER'>, 'OVER': <TokenType.OVER: 'OVER'>, 'OVERLAPS': <TokenType.OVERLAPS: 'OVERLAPS'>, 'OVERWRITE': <TokenType.OVERWRITE: 'OVERWRITE'>, 'PARTITION': <TokenType.PARTITION: 'PARTITION'>, 'PARTITION BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PARTITIONED_BY': <TokenType.PARTITION_BY: 'PARTITION_BY'>, 'PERCENT': <TokenType.PERCENT: 'PERCENT'>, 'PIVOT': <TokenType.PIVOT: 'PIVOT'>, 'PRAGMA': <TokenType.PRAGMA: 'PRAGMA'>, 'PRIMARY KEY': <TokenType.PRIMARY_KEY: 'PRIMARY_KEY'>, 'PROCEDURE': <TokenType.PROCEDURE: 'PROCEDURE'>, 'QUALIFY': <TokenType.QUALIFY: 'QUALIFY'>, 'RANGE': <TokenType.RANGE: 'RANGE'>, 'RECURSIVE': <TokenType.RECURSIVE: 'RECURSIVE'>, 'REGEXP': <TokenType.RLIKE: 'RLIKE'>, 'REPLACE': <TokenType.REPLACE: 'REPLACE'>, 'RETURNING': <TokenType.RETURNING: 'RETURNING'>, 'REFERENCES': <TokenType.REFERENCES: 'REFERENCES'>, 'RIGHT': <TokenType.RIGHT: 'RIGHT'>, 'RLIKE': <TokenType.RLIKE: 'RLIKE'>, 'ROLLBACK': <TokenType.ROLLBACK: 'ROLLBACK'>, 'ROLLUP': <TokenType.ROLLUP: 'ROLLUP'>, 'ROW': <TokenType.ROW: 'ROW'>, 'ROWS': 
<TokenType.ROWS: 'ROWS'>, 'SCHEMA': <TokenType.SCHEMA: 'SCHEMA'>, 'SELECT': <TokenType.SELECT: 'SELECT'>, 'SEMI': <TokenType.SEMI: 'SEMI'>, 'SET': <TokenType.SET: 'SET'>, 'SETTINGS': <TokenType.SETTINGS: 'SETTINGS'>, 'SHOW': <TokenType.SHOW: 'SHOW'>, 'SIMILAR TO': <TokenType.SIMILAR_TO: 'SIMILAR_TO'>, 'SOME': <TokenType.SOME: 'SOME'>, 'SORT BY': <TokenType.SORT_BY: 'SORT_BY'>, 'START WITH': <TokenType.START_WITH: 'START_WITH'>, 'TABLE': <TokenType.TABLE: 'TABLE'>, 'TABLESAMPLE': <TokenType.TABLE_SAMPLE: 'TABLE_SAMPLE'>, 'TEMP': <TokenType.TEMPORARY: 'TEMPORARY'>, 'TEMPORARY': <TokenType.TEMPORARY: 'TEMPORARY'>, 'THEN': <TokenType.THEN: 'THEN'>, 'TRUE': <TokenType.TRUE: 'TRUE'>, 'UNION': <TokenType.UNION: 'UNION'>, 'UNKNOWN': <TokenType.UNKNOWN: 'UNKNOWN'>, 'UNNEST': <TokenType.UNNEST: 'UNNEST'>, 'UNPIVOT': <TokenType.UNPIVOT: 'UNPIVOT'>, 'UPDATE': <TokenType.UPDATE: 'UPDATE'>, 'USE': <TokenType.USE: 'USE'>, 'USING': <TokenType.USING: 'USING'>, 'UUID': <TokenType.UUID: 'UUID'>, 'VALUES': <TokenType.VALUES: 'VALUES'>, 'VIEW': <TokenType.VIEW: 'VIEW'>, 'VOLATILE': <TokenType.VOLATILE: 'VOLATILE'>, 'WHEN': <TokenType.WHEN: 'WHEN'>, 'WHERE': <TokenType.WHERE: 'WHERE'>, 'WINDOW': <TokenType.WINDOW: 'WINDOW'>, 'WITH': <TokenType.WITH: 'WITH'>, 'APPLY': <TokenType.APPLY: 'APPLY'>, 'ARRAY': <TokenType.ARRAY: 'ARRAY'>, 'BIT': <TokenType.BIT: 'BIT'>, 'BOOL': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BOOLEAN': <TokenType.BOOLEAN: 'BOOLEAN'>, 'BYTE': <TokenType.TINYINT: 'TINYINT'>, 'MEDIUMINT': <TokenType.MEDIUMINT: 'MEDIUMINT'>, 'INT1': <TokenType.TINYINT: 'TINYINT'>, 'TINYINT': <TokenType.TINYINT: 'TINYINT'>, 'INT16': <TokenType.SMALLINT: 'SMALLINT'>, 'SHORT': <TokenType.SMALLINT: 'SMALLINT'>, 'SMALLINT': <TokenType.SMALLINT: 'SMALLINT'>, 'INT128': <TokenType.INT128: 'INT128'>, 'HUGEINT': <TokenType.INT128: 'INT128'>, 'INT2': <TokenType.SMALLINT: 'SMALLINT'>, 'INTEGER': <TokenType.INT: 'INT'>, 'INT': <TokenType.INT: 'INT'>, 'INT4': <TokenType.INT: 'INT'>, 'INT32': <TokenType.INT: 
'INT'>, 'INT64': <TokenType.BIGINT: 'BIGINT'>, 'LONG': <TokenType.BIGINT: 'BIGINT'>, 'BIGINT': <TokenType.BIGINT: 'BIGINT'>, 'INT8': <TokenType.TINYINT: 'TINYINT'>, 'DEC': <TokenType.DECIMAL: 'DECIMAL'>, 'DECIMAL': <TokenType.DECIMAL: 'DECIMAL'>, 'BIGDECIMAL': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'BIGNUMERIC': <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, 'MAP': <TokenType.MAP: 'MAP'>, 'NULLABLE': <TokenType.NULLABLE: 'NULLABLE'>, 'NUMBER': <TokenType.DECIMAL: 'DECIMAL'>, 'NUMERIC': <TokenType.DECIMAL: 'DECIMAL'>, 'FIXED': <TokenType.DECIMAL: 'DECIMAL'>, 'REAL': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT4': <TokenType.FLOAT: 'FLOAT'>, 'FLOAT8': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE': <TokenType.DOUBLE: 'DOUBLE'>, 'DOUBLE PRECISION': <TokenType.DOUBLE: 'DOUBLE'>, 'JSON': <TokenType.JSON: 'JSON'>, 'CHAR': <TokenType.CHAR: 'CHAR'>, 'CHARACTER': <TokenType.CHAR: 'CHAR'>, 'NCHAR': <TokenType.NCHAR: 'NCHAR'>, 'VARCHAR': <TokenType.VARCHAR: 'VARCHAR'>, 'VARCHAR2': <TokenType.VARCHAR: 'VARCHAR'>, 'NVARCHAR': <TokenType.NVARCHAR: 'NVARCHAR'>, 'NVARCHAR2': <TokenType.NVARCHAR: 'NVARCHAR'>, 'STR': <TokenType.TEXT: 'TEXT'>, 'STRING': <TokenType.TEXT: 'TEXT'>, 'TEXT': <TokenType.TEXT: 'TEXT'>, 'LONGTEXT': <TokenType.LONGTEXT: 'LONGTEXT'>, 'MEDIUMTEXT': <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, 'TINYTEXT': <TokenType.TINYTEXT: 'TINYTEXT'>, 'CLOB': <TokenType.TEXT: 'TEXT'>, 'LONGVARCHAR': <TokenType.TEXT: 'TEXT'>, 'BINARY': <TokenType.BINARY: 'BINARY'>, 'BLOB': <TokenType.VARBINARY: 'VARBINARY'>, 'LONGBLOB': <TokenType.LONGBLOB: 'LONGBLOB'>, 'MEDIUMBLOB': <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, 'TINYBLOB': <TokenType.TINYBLOB: 'TINYBLOB'>, 'BYTEA': <TokenType.VARBINARY: 'VARBINARY'>, 'VARBINARY': <TokenType.VARBINARY: 'VARBINARY'>, 'TIME': <TokenType.TIME: 'TIME'>, 'TIMETZ': <TokenType.TIMETZ: 'TIMETZ'>, 'TIMESTAMP': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPTZ': <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, 'TIMESTAMPLTZ': <TokenType.TIMESTAMPLTZ: 
'TIMESTAMPLTZ'>, 'DATE': <TokenType.DATE: 'DATE'>, 'DATETIME': <TokenType.DATETIME: 'DATETIME'>, 'INT4RANGE': <TokenType.INT4RANGE: 'INT4RANGE'>, 'INT4MULTIRANGE': <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, 'INT8RANGE': <TokenType.INT8RANGE: 'INT8RANGE'>, 'INT8MULTIRANGE': <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 'NUMRANGE': <TokenType.NUMRANGE: 'NUMRANGE'>, 'NUMMULTIRANGE': <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, 'TSRANGE': <TokenType.TSRANGE: 'TSRANGE'>, 'TSMULTIRANGE': <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, 'TSTZRANGE': <TokenType.TSTZRANGE: 'TSTZRANGE'>, 'TSTZMULTIRANGE': <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, 'DATERANGE': <TokenType.DATERANGE: 'DATERANGE'>, 'DATEMULTIRANGE': <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, 'UNIQUE': <TokenType.UNIQUE: 'UNIQUE'>, 'STRUCT': <TokenType.STRUCT: 'STRUCT'>, 'VARIANT': <TokenType.VARIANT: 'VARIANT'>, 'ALTER': <TokenType.ALTER: 'ALTER'>, 'ANALYZE': <TokenType.COMMAND: 'COMMAND'>, 'CALL': <TokenType.COMMAND: 'COMMAND'>, 'COMMENT': <TokenType.COMMENT: 'COMMENT'>, 'COPY': <TokenType.COMMAND: 'COMMAND'>, 'EXPLAIN': <TokenType.COMMAND: 'COMMAND'>, 'GRANT': <TokenType.COMMAND: 'COMMAND'>, 'OPTIMIZE': <TokenType.COMMAND: 'COMMAND'>, 'PREPARE': <TokenType.COMMAND: 'COMMAND'>, 'TRUNCATE': <TokenType.COMMAND: 'COMMAND'>, 'VACUUM': <TokenType.COMMAND: 'COMMAND'>, 'USER-DEFINED': <TokenType.USERDEFINED: 'USERDEFINED'>, 'FOR VERSION': <TokenType.VERSION_SNAPSHOT: 'VERSION_SNAPSHOT'>, 'FOR TIMESTAMP': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'ANY TYPE': <TokenType.VARIANT: 'VARIANT'>, 'BEGIN TRANSACTION': <TokenType.BEGIN: 'BEGIN'>, 'BYTES': <TokenType.BINARY: 'BINARY'>, 'CURRENT_DATETIME': <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, 'DECLARE': <TokenType.COMMAND: 'COMMAND'>, 'FLOAT64': <TokenType.DOUBLE: 'DOUBLE'>, 'FOR SYSTEM_TIME': <TokenType.TIMESTAMP_SNAPSHOT: 'TIMESTAMP_SNAPSHOT'>, 'MODEL': <TokenType.MODEL: 'MODEL'>, 'NOT DETERMINISTIC': <TokenType.VOLATILE: 'VOLATILE'>, 'RECORD': 
<TokenType.STRUCT: 'STRUCT'>}
ESCAPE_SEQUENCES: Dict[str, str] = {'\\a': '\x07', '\\b': '\x08', '\\f': '\x0c', '\\n': '\n', '\\r': '\r', '\\t': '\t', '\\v': '\x0b'}
class BigQuery.Parser(sqlglot.parser.Parser):
class Parser(parser.Parser):
    """Parser for GoogleSQL (BigQuery).

    Extends the base parser with BigQuery's function signatures, OPTIONS
    properties/constraints, dash-separated table names (e.g. ``my-project``),
    and procedural statements such as ``FOR ... IN ... DO``.
    """

    PREFIXED_PIVOT_COLUMNS = True

    LOG_DEFAULTS_TO_LN = True

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        "DATE": _parse_date,
        "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
        "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
        "DATE_TRUNC": lambda exprs: exp.DateTrunc(
            unit=exp.Literal.string(str(seq_get(exprs, 1))),
            this=seq_get(exprs, 0),
        ),
        "DATETIME_ADD": parse_date_delta_with_interval(exp.DatetimeAdd),
        "DATETIME_SUB": parse_date_delta_with_interval(exp.DatetimeSub),
        "DIV": binary_from_function(exp.IntDiv),
        "GENERATE_ARRAY": exp.GenerateSeries.from_arg_list,
        "MD5": exp.MD5Digest.from_arg_list,
        "TO_HEX": _parse_to_hex,
        # BigQuery's PARSE_DATE takes (format, value); the canonical StrToDate
        # expects (value, format), so the arguments are swapped here.
        "PARSE_DATE": lambda exprs: format_time_lambda(exp.StrToDate, "bigquery")(
            [seq_get(exprs, 1), seq_get(exprs, 0)]
        ),
        "PARSE_TIMESTAMP": _parse_timestamp,
        "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
        # When the pattern literal has exactly one capturing group, BigQuery
        # returns that group, so group=1 is made explicit for transpilation.
        # NOTE(review): re.compile(exprs[1].name) presumes a literal pattern;
        # a dynamic/invalid pattern would raise here — confirm upstream input.
        "REGEXP_EXTRACT": lambda exprs: exp.RegexpExtract(
            this=seq_get(exprs, 0),
            expression=seq_get(exprs, 1),
            position=seq_get(exprs, 2),
            occurrence=seq_get(exprs, 3),
            group=exp.Literal.number(1) if re.compile(exprs[1].name).groups == 1 else None,
        ),
        "SHA256": lambda exprs: exp.SHA2(
            this=seq_get(exprs, 0), length=exp.Literal.number(256)
        ),
        "SHA512": lambda exprs: exp.SHA2(
            this=seq_get(exprs, 0), length=exp.Literal.number(512)
        ),
        # https://cloud.google.com/bigquery/docs/reference/standard-sql/string_functions#split
        # The delimiter defaults to "," when omitted.
        "SPLIT": lambda exprs: exp.Split(
            this=seq_get(exprs, 0),
            expression=seq_get(exprs, 1) or exp.Literal.string(","),
        ),
        "TIME_ADD": parse_date_delta_with_interval(exp.TimeAdd),
        "TIME_SUB": parse_date_delta_with_interval(exp.TimeSub),
        "TIMESTAMP_ADD": parse_date_delta_with_interval(exp.TimestampAdd),
        "TIMESTAMP_SUB": parse_date_delta_with_interval(exp.TimestampSub),
        "TIMESTAMP_MICROS": lambda exprs: exp.UnixToTime(
            this=seq_get(exprs, 0), scale=exp.UnixToTime.MICROS
        ),
        "TIMESTAMP_MILLIS": lambda exprs: exp.UnixToTime(
            this=seq_get(exprs, 0), scale=exp.UnixToTime.MILLIS
        ),
        "TIMESTAMP_SECONDS": lambda exprs: exp.UnixToTime(
            this=seq_get(exprs, 0), scale=exp.UnixToTime.SECONDS
        ),
        "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
    }

    # Inherit every base parser except TRIM (parsed generically in BigQuery),
    # and give ARRAY a bespoke parser since it may wrap a full subquery.
    FUNCTION_PARSERS = {
        name: fn
        for name, fn in parser.Parser.FUNCTION_PARSERS.items()
        if name != "TRIM"
    }
    FUNCTION_PARSERS["ARRAY"] = lambda self: self.expression(
        exp.Array, expressions=[self._parse_statement()]
    )

    NO_PAREN_FUNCTIONS = dict(parser.Parser.NO_PAREN_FUNCTIONS)
    NO_PAREN_FUNCTIONS[TokenType.CURRENT_DATETIME] = exp.CurrentDatetime

    # TABLE can appear as a nested type (e.g. in UDF return signatures).
    NESTED_TYPE_TOKENS = parser.Parser.NESTED_TYPE_TOKENS | {TokenType.TABLE}

    # VALUES is a valid identifier in BigQuery.
    ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS | {TokenType.VALUES}

    PROPERTY_PARSERS = {
        **parser.Parser.PROPERTY_PARSERS,
        "NOT DETERMINISTIC": lambda self: self.expression(
            exp.StabilityProperty, this=exp.Literal.string("VOLATILE")
        ),
        "OPTIONS": lambda self: self._parse_with_property(),
    }

    CONSTRAINT_PARSERS = {
        **parser.Parser.CONSTRAINT_PARSERS,
        "OPTIONS": lambda self: exp.Properties(expressions=self._parse_with_property()),
    }

    # OVERLAPS is not a range operator in BigQuery.
    RANGE_PARSERS = {
        token: fn
        for token, fn in parser.Parser.RANGE_PARSERS.items()
        if token != TokenType.OVERLAPS
    }

    NULL_TOKENS = {TokenType.NULL, TokenType.UNKNOWN}

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.END: lambda self: self._parse_as_command(self._prev),
        TokenType.FOR: lambda self: self._parse_for_in(),
    }

    def _parse_for_in(self) -> exp.ForIn:
        """Parse BigQuery's procedural ``FOR <expr> IN ... DO <stmt>``."""
        iterator = self._parse_range()
        self._match_text_seq("DO")
        return self.expression(exp.ForIn, this=iterator, expression=self._parse_statement())

    def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
        """Parse one component of a table name, handling BigQuery quirks.

        https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#table_names
        """
        part = super()._parse_table_part(schema=schema) or self._parse_number()

        if isinstance(part, exp.Identifier):
            # Fold dash-separated tokens (e.g. my-project) into one identifier.
            name = part.name
            while self._match(TokenType.DASH, advance=False) and self._next:
                self._advance(2)
                name = f"{name}-{self._prev.text}"

            part = exp.Identifier(this=name, quoted=part.args.get("quoted"))
        elif isinstance(part, exp.Literal):
            # A name may start with digits (parsed as a literal); if a var
            # token immediately follows with no gap, glue it back on.
            name = part.name

            if (
                self._curr
                and self._prev.end == self._curr.start - 1
                and self._parse_var(any_token=True)
            ):
                name += self._prev.text

            part = exp.Identifier(this=name, quoted=True)

        return part

    def _parse_table_parts(self, schema: bool = False) -> exp.Table:
        """Split a single quoted name like `` `proj.dataset.tbl` `` into parts."""
        table = super()._parse_table_parts(schema=schema)
        if isinstance(table.this, exp.Identifier) and "." in table.name:
            catalog, db, this, *rest = (
                t.cast(t.Optional[exp.Expression], exp.to_identifier(word))
                for word in split_num_words(table.name, ".", 3)
            )

            # Anything beyond three parts is folded into a Dot chain.
            if rest and this:
                this = exp.Dot.build(t.cast(t.List[exp.Expression], [this, *rest]))

            table = exp.Table(this=this, db=db, catalog=catalog)

        return table

    def _parse_json_object(self) -> exp.JSONObject:
        """Normalize BQ's JSON_OBJECT "signature 2" (array of keys, array of
        values) into SQLGlot's canonical key/value-pair representation.

        https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_object_signature2
        """
        json_object = super()._parse_json_object()
        pair = seq_get(json_object.expressions, 0)

        if (
            pair
            and isinstance(pair.this, exp.Array)
            and isinstance(pair.expression, exp.Array)
        ):
            keys = pair.this.expressions
            values = pair.expression.expressions

            json_object.set(
                "expressions",
                [exp.JSONKeyValue(this=k, expression=v) for k, v in zip(keys, values)],
            )

        return json_object

Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.

Arguments:
  • error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
  • error_message_context: Determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
  • max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
PREFIXED_PIVOT_COLUMNS = True
LOG_DEFAULTS_TO_LN = True
FUNCTIONS = {'ABS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Abs'>>, 'ANY_VALUE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.AnyValue'>>, 'APPROX_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_COUNT_DISTINCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxDistinct'>>, 'APPROX_QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxQuantile'>>, 'APPROX_TOP_K': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ApproxTopK'>>, 'ARG_MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARGMAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'MAX_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMax'>>, 'ARG_MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARGMIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'MIN_BY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArgMin'>>, 'ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Array'>>, 'ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAgg'>>, 'ARRAY_ALL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAll'>>, 'ARRAY_ANY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayAny'>>, 'ARRAY_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayConcat'>>, 'ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayContains'>>, 'FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_FILTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayFilter'>>, 'ARRAY_JOIN': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.ArrayJoin'>>, 'ARRAY_SIZE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySize'>>, 'ARRAY_SORT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySort'>>, 'ARRAY_SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArraySum'>>, 'ARRAY_UNION_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUnionAgg'>>, 'ARRAY_UNIQUE_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ArrayUniqueAgg'>>, 'AVG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Avg'>>, 'CASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Case'>>, 'CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Cast'>>, 'CAST_TO_STR_TYPE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CastToStrType'>>, 'CEIL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CEILING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ceil'>>, 'CHR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Chr'>>, 'CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Chr'>>, 'COALESCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'IFNULL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'NVL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Coalesce'>>, 'COLLATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Collate'>>, 'CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Concat'>>, 'CONCAT_WS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ConcatWs'>>, 'COUNT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Count'>>, 'COUNT_IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CountIf'>>, 'CURRENT_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDate'>>, 'CURRENT_DATETIME': <bound 
method Func.from_arg_list of <class 'sqlglot.expressions.CurrentDatetime'>>, 'CURRENT_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTime'>>, 'CURRENT_TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentTimestamp'>>, 'CURRENT_USER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.CurrentUser'>>, 'DATE': <function _parse_date>, 'DATE_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATEDIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATE_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateDiff'>>, 'DATEFROMPARTS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateFromParts'>>, 'DATE_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateStrToDate'>>, 'DATE_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATE_TO_DATE_STR': <function Parser.<lambda>>, 'DATE_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DateToDi'>>, 'DATE_TRUNC': <function BigQuery.Parser.<lambda>>, 'DATETIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeDiff'>>, 'DATETIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'DATETIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DatetimeTrunc'>>, 'DAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Day'>>, 'DAY_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAYOFMONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfMonth'>>, 'DAY_OF_WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAYOFWEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfWeek'>>, 'DAY_OF_YEAR': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.DayOfYear'>>, 'DAYOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DayOfYear'>>, 'DECODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Decode'>>, 'DI_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.DiToDate'>>, 'ENCODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Encode'>>, 'EXP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Exp'>>, 'EXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Explode'>>, 'EXPLODE_OUTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ExplodeOuter'>>, 'EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Extract'>>, 'FIRST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.First'>>, 'FLATTEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Flatten'>>, 'FLOOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Floor'>>, 'FROM_BASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase'>>, 'FROM_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.FromBase64'>>, 'GENERATE_SERIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'GREATEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Greatest'>>, 'GROUP_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GroupConcat'>>, 'HEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hex'>>, 'HLL': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Hll'>>, 'IF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.If'>>, 'INITCAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Initcap'>>, 'IS_INF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsInf'>>, 'ISINF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsInf'>>, 'IS_NAN': <bound method Func.from_arg_list of 
<class 'sqlglot.expressions.IsNan'>>, 'ISNAN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.IsNan'>>, 'J_S_O_N_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArray'>>, 'J_S_O_N_ARRAY_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayAgg'>>, 'JSON_ARRAY_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONArrayContains'>>, 'JSONB_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtract'>>, 'JSONB_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONBExtractScalar'>>, 'JSON_EXTRACT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtract'>>, 'JSON_EXTRACT_SCALAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONExtractScalar'>>, 'JSON_FORMAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>, 'J_S_O_N_OBJECT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONObject'>>, 'J_S_O_N_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONTable'>>, 'LAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Last'>>, 'LAST_DATE_OF_MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LastDateOfMonth'>>, 'LEAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Least'>>, 'LEFT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Left'>>, 'LENGTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Length'>>, 'LEVENSHTEIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Levenshtein'>>, 'LN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Ln'>>, 'LOG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log'>>, 'LOG10': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Log10'>>, 'LOG2': <bound 
method Func.from_arg_list of <class 'sqlglot.expressions.Log2'>>, 'LOGICAL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOL_AND': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'BOOLAND_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalAnd'>>, 'LOGICAL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOL_OR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'BOOLOR_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.LogicalOr'>>, 'LOWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'LCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Lower'>>, 'MD5': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MD5_DIGEST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MD5Digest'>>, 'MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Map'>>, 'MAP_FROM_ENTRIES': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MapFromEntries'>>, 'MATCH_AGAINST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MatchAgainst'>>, 'MAX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Max'>>, 'MIN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Min'>>, 'MONTH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Month'>>, 'MONTHS_BETWEEN': <bound method Func.from_arg_list of <class 'sqlglot.expressions.MonthsBetween'>>, 'NEXT_VALUE_FOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NextValueFor'>>, 'NULLIF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nullif'>>, 'NUMBER_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.NumberToStr'>>, 'NVL2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Nvl2'>>, 'OPEN_J_S_O_N': <bound method Func.from_arg_list of 
<class 'sqlglot.expressions.OpenJSON'>>, 'PARAMETERIZED_AGG': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParameterizedAgg'>>, 'PARSE_JSON': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'JSON_PARSE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ParseJSON'>>, 'PERCENTILE_CONT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileCont'>>, 'PERCENTILE_DISC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PercentileDisc'>>, 'POSEXPLODE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Posexplode'>>, 'POSEXPLODE_OUTER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.PosexplodeOuter'>>, 'POWER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'POW': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Pow'>>, 'PREDICT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Predict'>>, 'QUANTILE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Quantile'>>, 'RANGE_N': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RangeN'>>, 'READ_CSV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ReadCSV'>>, 'REDUCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Reduce'>>, 'REGEXP_EXTRACT': <function BigQuery.Parser.<lambda>>, 'REGEXP_I_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpILike'>>, 'REGEXP_LIKE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'REGEXP_REPLACE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpReplace'>>, 'REGEXP_SPLIT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpSplit'>>, 'REPEAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Repeat'>>, 'RIGHT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Right'>>, 'ROUND': <bound method Func.from_arg_list of <class 
'sqlglot.expressions.Round'>>, 'ROW_NUMBER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RowNumber'>>, 'SHA': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA1': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA'>>, 'SHA2': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SHA2'>>, 'SAFE_CONCAT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeConcat'>>, 'SAFE_DIVIDE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SafeDivide'>>, 'SORT_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.SortArray'>>, 'SPLIT': <function BigQuery.Parser.<lambda>>, 'SQRT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sqrt'>>, 'STANDARD_HASH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StandardHash'>>, 'STAR_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StarMap'>>, 'STARTS_WITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STARTSWITH': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StartsWith'>>, 'STDDEV': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stddev'>>, 'STDDEV_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevPop'>>, 'STDDEV_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StddevSamp'>>, 'STR_POSITION': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrPosition'>>, 'STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToDate'>>, 'STR_TO_MAP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToMap'>>, 'STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToTime'>>, 'STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.StrToUnix'>>, 'STRUCT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Struct'>>, 'STRUCT_EXTRACT': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.StructExtract'>>, 'STUFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'INSERT': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Stuff'>>, 'SUBSTRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Substring'>>, 'SUM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Sum'>>, 'TIME_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeDiff'>>, 'TIME_STR_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToDate'>>, 'TIME_STR_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToTime'>>, 'TIME_STR_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeStrToUnix'>>, 'TIME_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIME_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToStr'>>, 'TIME_TO_TIME_STR': <function Parser.<lambda>>, 'TIME_TO_UNIX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeToUnix'>>, 'TIME_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimeTrunc'>>, 'TIMESTAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Timestamp'>>, 'TIMESTAMP_ADD': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampDiff'>>, 'TIMESTAMP_SUB': <function parse_date_delta_with_interval.<locals>.func>, 'TIMESTAMP_TRUNC': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TimestampTrunc'>>, 'TO_BASE64': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToBase64'>>, 'TO_CHAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToChar'>>, 'TO_DAYS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.ToDays'>>, 'TRANSFORM': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.Transform'>>, 'TRIM': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Trim'>>, 'TRY_CAST': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TryCast'>>, 'TS_OR_DI_TO_DI': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDiToDi'>>, 'TS_OR_DS_ADD': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsAdd'>>, 'TS_OR_DS_DIFF': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsDiff'>>, 'TS_OR_DS_TO_DATE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.TsOrDsToDate'>>, 'TS_OR_DS_TO_DATE_STR': <function Parser.<lambda>>, 'UNHEX': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Unhex'>>, 'UNIX_TO_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToStr'>>, 'UNIX_TO_TIME': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTime'>>, 'UNIX_TO_TIME_STR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.UnixToTimeStr'>>, 'UPPER': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'UCASE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Upper'>>, 'VAR_MAP': <function parse_var_map>, 'VARIANCE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VAR_SAMP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Variance'>>, 'VARIANCE_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'VAR_POP': <bound method Func.from_arg_list of <class 'sqlglot.expressions.VariancePop'>>, 'WEEK': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Week'>>, 'WEEK_OF_YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WEEKOFYEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.WeekOfYear'>>, 'WHEN': <bound method 
Func.from_arg_list of <class 'sqlglot.expressions.When'>>, 'X_M_L_TABLE': <bound method Func.from_arg_list of <class 'sqlglot.expressions.XMLTable'>>, 'XOR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Xor'>>, 'YEAR': <bound method Func.from_arg_list of <class 'sqlglot.expressions.Year'>>, 'GLOB': <function Parser.<lambda>>, 'LIKE': <function parse_like>, 'DIV': <function binary_from_function.<locals>.<lambda>>, 'GENERATE_ARRAY': <bound method Func.from_arg_list of <class 'sqlglot.expressions.GenerateSeries'>>, 'TO_HEX': <function _parse_to_hex>, 'PARSE_DATE': <function BigQuery.Parser.<lambda>>, 'PARSE_TIMESTAMP': <function _parse_timestamp>, 'REGEXP_CONTAINS': <bound method Func.from_arg_list of <class 'sqlglot.expressions.RegexpLike'>>, 'SHA256': <function BigQuery.Parser.<lambda>>, 'SHA512': <function BigQuery.Parser.<lambda>>, 'TIMESTAMP_MICROS': <function BigQuery.Parser.<lambda>>, 'TIMESTAMP_MILLIS': <function BigQuery.Parser.<lambda>>, 'TIMESTAMP_SECONDS': <function BigQuery.Parser.<lambda>>, 'TO_JSON_STRING': <bound method Func.from_arg_list of <class 'sqlglot.expressions.JSONFormat'>>}
FUNCTION_PARSERS = {'ANY_VALUE': <function Parser.<lambda>>, 'CAST': <function Parser.<lambda>>, 'CONCAT': <function Parser.<lambda>>, 'CONCAT_WS': <function Parser.<lambda>>, 'CONVERT': <function Parser.<lambda>>, 'DECODE': <function Parser.<lambda>>, 'EXTRACT': <function Parser.<lambda>>, 'JSON_OBJECT': <function Parser.<lambda>>, 'JSON_TABLE': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATCH': <function Parser.<lambda>>, 'OPENJSON': <function Parser.<lambda>>, 'POSITION': <function Parser.<lambda>>, 'PREDICT': <function Parser.<lambda>>, 'SAFE_CAST': <function Parser.<lambda>>, 'STRING_AGG': <function Parser.<lambda>>, 'SUBSTRING': <function Parser.<lambda>>, 'TRY_CAST': <function Parser.<lambda>>, 'TRY_CONVERT': <function Parser.<lambda>>, 'ARRAY': <function BigQuery.Parser.<lambda>>}
NO_PAREN_FUNCTIONS = {<TokenType.CURRENT_DATE: 'CURRENT_DATE'>: <class 'sqlglot.expressions.CurrentDate'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>: <class 'sqlglot.expressions.CurrentDatetime'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>: <class 'sqlglot.expressions.CurrentTime'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>: <class 'sqlglot.expressions.CurrentTimestamp'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>: <class 'sqlglot.expressions.CurrentUser'>}
NESTED_TYPE_TOKENS = {<TokenType.TABLE: 'TABLE'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.NESTED: 'NESTED'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.MAP: 'MAP'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>}
ID_VAR_TOKENS = {<TokenType.COMMENT: 'COMMENT'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.FALSE: 'FALSE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.UUID: 'UUID'>, <TokenType.TINYTEXT: 'TINYTEXT'>, <TokenType.XML: 'XML'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.TINYBLOB: 'TINYBLOB'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.SHOW: 'SHOW'>, <TokenType.BINARY: 'BINARY'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.NATURAL: 'NATURAL'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.UINT: 'UINT'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.ANY: 'ANY'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.ROWS: 'ROWS'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.DATE: 'DATE'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.INT: 'INT'>, <TokenType.BIT: 'BIT'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.WINDOW: 'WINDOW'>, <TokenType.RANGE: 'RANGE'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.DESC: 'DESC'>, <TokenType.INET: 'INET'>, <TokenType.OVERLAPS: 'OVERLAPS'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.OFFSET: 'OFFSET'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.BIGSERIAL: 
'BIGSERIAL'>, <TokenType.DIV: 'DIV'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.SET: 'SET'>, <TokenType.LEFT: 'LEFT'>, <TokenType.SEMI: 'SEMI'>, <TokenType.APPLY: 'APPLY'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.ALL: 'ALL'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.TIMESTAMP_NS: 'TIMESTAMP_NS'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.ASC: 'ASC'>, <TokenType.SUPER: 'SUPER'>, <TokenType.NEXT: 'NEXT'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.YEAR: 'YEAR'>, <TokenType.MERGE: 'MERGE'>, <TokenType.MODEL: 'MODEL'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.VIEW: 'VIEW'>, <TokenType.MAP: 'MAP'>, <TokenType.KILL: 'KILL'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.TABLE: 'TABLE'>, <TokenType.VALUES: 'VALUES'>, <TokenType.UINT128: 'UINT128'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.CACHE: 'CACHE'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.INT256: 'INT256'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.TRUE: 'TRUE'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.RIGHT: 'RIGHT'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.OPERATOR: 'OPERATOR'>, <TokenType.TIMESTAMP_MS: 'TIMESTAMP_MS'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.TIMESTAMP_S: 'TIMESTAMP_S'>, <TokenType.REFRESH: 'REFRESH'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.IS: 'IS'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VAR: 'VAR'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.JSONB: 'JSONB'>, 
<TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.UMEDIUMINT: 'UMEDIUMINT'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, <TokenType.FILTER: 'FILTER'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.USE: 'USE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.SOME: 'SOME'>, <TokenType.JSON: 'JSON'>, <TokenType.NESTED: 'NESTED'>, <TokenType.END: 'END'>, <TokenType.RECURSIVE: 'RECURSIVE'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.UINT256: 'UINT256'>, <TokenType.ENUM: 'ENUM'>, <TokenType.CASE: 'CASE'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.FULL: 'FULL'>, <TokenType.FIRST: 'FIRST'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.TEXT: 'TEXT'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.TOP: 'TOP'>, <TokenType.UDECIMAL: 'UDECIMAL'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.KEEP: 'KEEP'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.INT128: 'INT128'>, <TokenType.INDEX: 'INDEX'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.NULL: 'NULL'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.LOAD: 'LOAD'>, <TokenType.ANTI: 'ANTI'>, <TokenType.TIME: 'TIME'>, <TokenType.ROW: 'ROW'>}
PROPERTY_PARSERS = {'ALGORITHM': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'BLOCKCOMPRESSION': <function Parser.<lambda>>, 'CHARSET': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECKSUM': <function Parser.<lambda>>, 'CLUSTER BY': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COPY': <function Parser.<lambda>>, 'DATABLOCKSIZE': <function Parser.<lambda>>, 'DEFINER': <function Parser.<lambda>>, 'DETERMINISTIC': <function Parser.<lambda>>, 'DISTKEY': <function Parser.<lambda>>, 'DISTSTYLE': <function Parser.<lambda>>, 'ENGINE': <function Parser.<lambda>>, 'EXECUTE': <function Parser.<lambda>>, 'EXTERNAL': <function Parser.<lambda>>, 'FALLBACK': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'FREESPACE': <function Parser.<lambda>>, 'HEAP': <function Parser.<lambda>>, 'IMMUTABLE': <function Parser.<lambda>>, 'INPUT': <function Parser.<lambda>>, 'JOURNAL': <function Parser.<lambda>>, 'LANGUAGE': <function Parser.<lambda>>, 'LAYOUT': <function Parser.<lambda>>, 'LIFETIME': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'LOCATION': <function Parser.<lambda>>, 'LOCK': <function Parser.<lambda>>, 'LOCKING': <function Parser.<lambda>>, 'LOG': <function Parser.<lambda>>, 'MATERIALIZED': <function Parser.<lambda>>, 'MERGEBLOCKRATIO': <function Parser.<lambda>>, 'MULTISET': <function Parser.<lambda>>, 'NO': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'ORDER BY': <function Parser.<lambda>>, 'OUTPUT': <function Parser.<lambda>>, 'PARTITION': <function Parser.<lambda>>, 'PARTITION BY': <function Parser.<lambda>>, 'PARTITIONED BY': <function Parser.<lambda>>, 'PARTITIONED_BY': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'RANGE': <function Parser.<lambda>>, 'REMOTE': <function Parser.<lambda>>, 'RETURNS': <function Parser.<lambda>>, 'ROW': 
<function Parser.<lambda>>, 'ROW_FORMAT': <function Parser.<lambda>>, 'SAMPLE': <function Parser.<lambda>>, 'SET': <function Parser.<lambda>>, 'SETTINGS': <function Parser.<lambda>>, 'SORTKEY': <function Parser.<lambda>>, 'SOURCE': <function Parser.<lambda>>, 'STABLE': <function Parser.<lambda>>, 'STORED': <function Parser.<lambda>>, 'SYSTEM_VERSIONING': <function Parser.<lambda>>, 'TBLPROPERTIES': <function Parser.<lambda>>, 'TEMP': <function Parser.<lambda>>, 'TEMPORARY': <function Parser.<lambda>>, 'TO': <function Parser.<lambda>>, 'TRANSIENT': <function Parser.<lambda>>, 'TRANSFORM': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'USING': <function Parser.<lambda>>, 'VOLATILE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'NOT DETERMINISTIC': <function BigQuery.Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
CONSTRAINT_PARSERS = {'AUTOINCREMENT': <function Parser.<lambda>>, 'AUTO_INCREMENT': <function Parser.<lambda>>, 'CASESPECIFIC': <function Parser.<lambda>>, 'CHARACTER SET': <function Parser.<lambda>>, 'CHECK': <function Parser.<lambda>>, 'COLLATE': <function Parser.<lambda>>, 'COMMENT': <function Parser.<lambda>>, 'COMPRESS': <function Parser.<lambda>>, 'CLUSTERED': <function Parser.<lambda>>, 'NONCLUSTERED': <function Parser.<lambda>>, 'DEFAULT': <function Parser.<lambda>>, 'ENCODE': <function Parser.<lambda>>, 'FOREIGN KEY': <function Parser.<lambda>>, 'FORMAT': <function Parser.<lambda>>, 'GENERATED': <function Parser.<lambda>>, 'IDENTITY': <function Parser.<lambda>>, 'INLINE': <function Parser.<lambda>>, 'LIKE': <function Parser.<lambda>>, 'NOT': <function Parser.<lambda>>, 'NULL': <function Parser.<lambda>>, 'ON': <function Parser.<lambda>>, 'PATH': <function Parser.<lambda>>, 'PERIOD': <function Parser.<lambda>>, 'PRIMARY KEY': <function Parser.<lambda>>, 'REFERENCES': <function Parser.<lambda>>, 'TITLE': <function Parser.<lambda>>, 'TTL': <function Parser.<lambda>>, 'UNIQUE': <function Parser.<lambda>>, 'UPPERCASE': <function Parser.<lambda>>, 'WITH': <function Parser.<lambda>>, 'OPTIONS': <function BigQuery.Parser.<lambda>>}
RANGE_PARSERS = {<TokenType.BETWEEN: 'BETWEEN'>: <function Parser.<lambda>>, <TokenType.GLOB: 'GLOB'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.ILIKE: 'ILIKE'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.IN: 'IN'>: <function Parser.<lambda>>, <TokenType.IRLIKE: 'IRLIKE'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.IS: 'IS'>: <function Parser.<lambda>>, <TokenType.LIKE: 'LIKE'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.RLIKE: 'RLIKE'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.SIMILAR_TO: 'SIMILAR_TO'>: <function binary_range_parser.<locals>.<lambda>>, <TokenType.FOR: 'FOR'>: <function Parser.<lambda>>}
NULL_TOKENS = {<TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.NULL: 'NULL'>}
STATEMENT_PARSERS = {<TokenType.ALTER: 'ALTER'>: <function Parser.<lambda>>, <TokenType.BEGIN: 'BEGIN'>: <function Parser.<lambda>>, <TokenType.CACHE: 'CACHE'>: <function Parser.<lambda>>, <TokenType.COMMIT: 'COMMIT'>: <function Parser.<lambda>>, <TokenType.COMMENT: 'COMMENT'>: <function Parser.<lambda>>, <TokenType.CREATE: 'CREATE'>: <function Parser.<lambda>>, <TokenType.DELETE: 'DELETE'>: <function Parser.<lambda>>, <TokenType.DESC: 'DESC'>: <function Parser.<lambda>>, <TokenType.DESCRIBE: 'DESCRIBE'>: <function Parser.<lambda>>, <TokenType.DROP: 'DROP'>: <function Parser.<lambda>>, <TokenType.INSERT: 'INSERT'>: <function Parser.<lambda>>, <TokenType.KILL: 'KILL'>: <function Parser.<lambda>>, <TokenType.LOAD: 'LOAD'>: <function Parser.<lambda>>, <TokenType.MERGE: 'MERGE'>: <function Parser.<lambda>>, <TokenType.PIVOT: 'PIVOT'>: <function Parser.<lambda>>, <TokenType.PRAGMA: 'PRAGMA'>: <function Parser.<lambda>>, <TokenType.REFRESH: 'REFRESH'>: <function Parser.<lambda>>, <TokenType.ROLLBACK: 'ROLLBACK'>: <function Parser.<lambda>>, <TokenType.SET: 'SET'>: <function Parser.<lambda>>, <TokenType.UNCACHE: 'UNCACHE'>: <function Parser.<lambda>>, <TokenType.UPDATE: 'UPDATE'>: <function Parser.<lambda>>, <TokenType.USE: 'USE'>: <function Parser.<lambda>>, <TokenType.END: 'END'>: <function BigQuery.Parser.<lambda>>, <TokenType.FOR: 'FOR'>: <function BigQuery.Parser.<lambda>>}
TABLE_ALIAS_TOKENS = {<TokenType.COMMENT: 'COMMENT'>, <TokenType.COLLATE: 'COLLATE'>, <TokenType.USERDEFINED: 'USERDEFINED'>, <TokenType.FALSE: 'FALSE'>, <TokenType.BIGINT: 'BIGINT'>, <TokenType.SMALLMONEY: 'SMALLMONEY'>, <TokenType.UUID: 'UUID'>, <TokenType.TINYTEXT: 'TINYTEXT'>, <TokenType.XML: 'XML'>, <TokenType.USMALLINT: 'USMALLINT'>, <TokenType.MONEY: 'MONEY'>, <TokenType.TIME: 'TIME'>, <TokenType.ANTI: 'ANTI'>, <TokenType.SCHEMA: 'SCHEMA'>, <TokenType.SMALLSERIAL: 'SMALLSERIAL'>, <TokenType.PRAGMA: 'PRAGMA'>, <TokenType.PARTITION: 'PARTITION'>, <TokenType.TINYBLOB: 'TINYBLOB'>, <TokenType.TSMULTIRANGE: 'TSMULTIRANGE'>, <TokenType.SHOW: 'SHOW'>, <TokenType.BINARY: 'BINARY'>, <TokenType.ENUM16: 'ENUM16'>, <TokenType.TSTZRANGE: 'TSTZRANGE'>, <TokenType.INT4RANGE: 'INT4RANGE'>, <TokenType.DECIMAL: 'DECIMAL'>, <TokenType.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>, <TokenType.UINT: 'UINT'>, <TokenType.AUTO_INCREMENT: 'AUTO_INCREMENT'>, <TokenType.ANY: 'ANY'>, <TokenType.UBIGINT: 'UBIGINT'>, <TokenType.ROWS: 'ROWS'>, <TokenType.MEDIUMTEXT: 'MEDIUMTEXT'>, <TokenType.SETTINGS: 'SETTINGS'>, <TokenType.MEDIUMBLOB: 'MEDIUMBLOB'>, <TokenType.DATE: 'DATE'>, <TokenType.ENUM8: 'ENUM8'>, <TokenType.SMALLINT: 'SMALLINT'>, <TokenType.EXISTS: 'EXISTS'>, <TokenType.NULLABLE: 'NULLABLE'>, <TokenType.INT: 'INT'>, <TokenType.BIT: 'BIT'>, <TokenType.STRUCT: 'STRUCT'>, <TokenType.INTERVAL: 'INTERVAL'>, <TokenType.CURRENT_TIMESTAMP: 'CURRENT_TIMESTAMP'>, <TokenType.RANGE: 'RANGE'>, <TokenType.VARIANT: 'VARIANT'>, <TokenType.TINYINT: 'TINYINT'>, <TokenType.LOWCARDINALITY: 'LOWCARDINALITY'>, <TokenType.FIXEDSTRING: 'FIXEDSTRING'>, <TokenType.DESC: 'DESC'>, <TokenType.INET: 'INET'>, <TokenType.OVERLAPS: 'OVERLAPS'>, <TokenType.CURRENT_DATE: 'CURRENT_DATE'>, <TokenType.FUNCTION: 'FUNCTION'>, <TokenType.ORDINALITY: 'ORDINALITY'>, <TokenType.FOREIGN_KEY: 'FOREIGN_KEY'>, <TokenType.VARBINARY: 'VARBINARY'>, <TokenType.BIGSERIAL: 'BIGSERIAL'>, <TokenType.DIV: 'DIV'>, <TokenType.SET: 'SET'>, 
<TokenType.SEMI: 'SEMI'>, <TokenType.DATETIME: 'DATETIME'>, <TokenType.HLLSKETCH: 'HLLSKETCH'>, <TokenType.TSTZMULTIRANGE: 'TSTZMULTIRANGE'>, <TokenType.ALL: 'ALL'>, <TokenType.BIGDECIMAL: 'BIGDECIMAL'>, <TokenType.DESCRIBE: 'DESCRIBE'>, <TokenType.NCHAR: 'NCHAR'>, <TokenType.TIMESTAMP_NS: 'TIMESTAMP_NS'>, <TokenType.PERCENT: 'PERCENT'>, <TokenType.DATABASE: 'DATABASE'>, <TokenType.PROCEDURE: 'PROCEDURE'>, <TokenType.OBJECT_IDENTIFIER: 'OBJECT_IDENTIFIER'>, <TokenType.FORMAT: 'FORMAT'>, <TokenType.ASC: 'ASC'>, <TokenType.SUPER: 'SUPER'>, <TokenType.NEXT: 'NEXT'>, <TokenType.UNKNOWN: 'UNKNOWN'>, <TokenType.VOLATILE: 'VOLATILE'>, <TokenType.YEAR: 'YEAR'>, <TokenType.MERGE: 'MERGE'>, <TokenType.MODEL: 'MODEL'>, <TokenType.IPPREFIX: 'IPPREFIX'>, <TokenType.VIEW: 'VIEW'>, <TokenType.MAP: 'MAP'>, <TokenType.KILL: 'KILL'>, <TokenType.OBJECT: 'OBJECT'>, <TokenType.TABLE: 'TABLE'>, <TokenType.UINT128: 'UINT128'>, <TokenType.CURRENT_DATETIME: 'CURRENT_DATETIME'>, <TokenType.CACHE: 'CACHE'>, <TokenType.INT8RANGE: 'INT8RANGE'>, <TokenType.IPADDRESS: 'IPADDRESS'>, <TokenType.FLOAT: 'FLOAT'>, <TokenType.DELETE: 'DELETE'>, <TokenType.INT256: 'INT256'>, <TokenType.HSTORE: 'HSTORE'>, <TokenType.TRUE: 'TRUE'>, <TokenType.ESCAPE: 'ESCAPE'>, <TokenType.GEOMETRY: 'GEOMETRY'>, <TokenType.BOOLEAN: 'BOOLEAN'>, <TokenType.VARCHAR: 'VARCHAR'>, <TokenType.TIMESTAMPTZ: 'TIMESTAMPTZ'>, <TokenType.OPERATOR: 'OPERATOR'>, <TokenType.TIMESTAMP_MS: 'TIMESTAMP_MS'>, <TokenType.UNIQUE: 'UNIQUE'>, <TokenType.BEGIN: 'BEGIN'>, <TokenType.TIMESTAMP_S: 'TIMESTAMP_S'>, <TokenType.REFRESH: 'REFRESH'>, <TokenType.EXECUTE: 'EXECUTE'>, <TokenType.IS: 'IS'>, <TokenType.CHAR: 'CHAR'>, <TokenType.VAR: 'VAR'>, <TokenType.COMMAND: 'COMMAND'>, <TokenType.TIMESTAMP: 'TIMESTAMP'>, <TokenType.JSONB: 'JSONB'>, <TokenType.NUMRANGE: 'NUMRANGE'>, <TokenType.PIVOT: 'PIVOT'>, <TokenType.COLUMN: 'COLUMN'>, <TokenType.CONSTRAINT: 'CONSTRAINT'>, <TokenType.UMEDIUMINT: 'UMEDIUMINT'>, <TokenType.INT8MULTIRANGE: 'INT8MULTIRANGE'>, 
<TokenType.FILTER: 'FILTER'>, <TokenType.REFERENCES: 'REFERENCES'>, <TokenType.USE: 'USE'>, <TokenType.CURRENT_TIME: 'CURRENT_TIME'>, <TokenType.SOME: 'SOME'>, <TokenType.JSON: 'JSON'>, <TokenType.NESTED: 'NESTED'>, <TokenType.END: 'END'>, <TokenType.RECURSIVE: 'RECURSIVE'>, <TokenType.SERIAL: 'SERIAL'>, <TokenType.UINT256: 'UINT256'>, <TokenType.ENUM: 'ENUM'>, <TokenType.CASE: 'CASE'>, <TokenType.DATERANGE: 'DATERANGE'>, <TokenType.CURRENT_USER: 'CURRENT_USER'>, <TokenType.DOUBLE: 'DOUBLE'>, <TokenType.FIRST: 'FIRST'>, <TokenType.OVERWRITE: 'OVERWRITE'>, <TokenType.TIMETZ: 'TIMETZ'>, <TokenType.TEXT: 'TEXT'>, <TokenType.TSRANGE: 'TSRANGE'>, <TokenType.NVARCHAR: 'NVARCHAR'>, <TokenType.UNIQUEIDENTIFIER: 'UNIQUEIDENTIFIER'>, <TokenType.GEOGRAPHY: 'GEOGRAPHY'>, <TokenType.IMAGE: 'IMAGE'>, <TokenType.TOP: 'TOP'>, <TokenType.UDECIMAL: 'UDECIMAL'>, <TokenType.ISNULL: 'ISNULL'>, <TokenType.KEEP: 'KEEP'>, <TokenType.DATEMULTIRANGE: 'DATEMULTIRANGE'>, <TokenType.ARRAY: 'ARRAY'>, <TokenType.LONGBLOB: 'LONGBLOB'>, <TokenType.LONGTEXT: 'LONGTEXT'>, <TokenType.INT4MULTIRANGE: 'INT4MULTIRANGE'>, <TokenType.UPDATE: 'UPDATE'>, <TokenType.INT128: 'INT128'>, <TokenType.INDEX: 'INDEX'>, <TokenType.TEMPORARY: 'TEMPORARY'>, <TokenType.UTINYINT: 'UTINYINT'>, <TokenType.NULL: 'NULL'>, <TokenType.DATETIME64: 'DATETIME64'>, <TokenType.DICTIONARY: 'DICTIONARY'>, <TokenType.DEFAULT: 'DEFAULT'>, <TokenType.UNPIVOT: 'UNPIVOT'>, <TokenType.PSEUDO_TYPE: 'PSEUDO_TYPE'>, <TokenType.NUMMULTIRANGE: 'NUMMULTIRANGE'>, <TokenType.COMMIT: 'COMMIT'>, <TokenType.LOAD: 'LOAD'>, <TokenType.MEDIUMINT: 'MEDIUMINT'>, <TokenType.ROWVERSION: 'ROWVERSION'>, <TokenType.ROW: 'ROW'>}
LOG_BASE_FIRST = False
TOKENIZER_CLASS: Type[sqlglot.tokens.Tokenizer] = <class 'BigQuery.Tokenizer'>
UNNEST_COLUMN_ONLY: bool = True
SUPPORTS_USER_DEFINED_TYPES = False
NORMALIZE_FUNCTIONS = False
SHOW_TRIE: Dict = {}
SET_TRIE: Dict = {'GLOBAL': {0: True}, 'LOCAL': {0: True}, 'SESSION': {0: True}, 'TRANSACTION': {0: True}}
FORMAT_MAPPING: Dict[str, str] = {'DD': '%d', 'MM': '%m', 'MON': '%b', 'MONTH': '%B', 'YYYY': '%Y', 'YY': '%y', 'HH': '%I', 'HH12': '%I', 'HH24': '%H', 'MI': '%M', 'SS': '%S', 'SSSSS': '%f', 'TZH': '%z'}
FORMAT_TRIE: Dict = {'D': {'D': {0: True}}, 'M': {'M': {0: True}, 'O': {'N': {0: True, 'T': {'H': {0: True}}}}, 'I': {0: True}}, 'Y': {'Y': {'Y': {'Y': {0: True}}, 0: True}}, 'H': {'H': {0: True, '1': {'2': {0: True}}, '2': {'4': {0: True}}}}, 'S': {'S': {0: True, 'S': {'S': {'S': {0: True}}}}}, 'T': {'Z': {'H': {0: True}}}}
TIME_MAPPING: Dict[str, str] = {'%D': '%m/%d/%y'}
TIME_TRIE: Dict = {'%': {'D': {0: True}}}
class BigQuery.Generator(sqlglot.generator.Generator):
481    class Generator(generator.Generator):
482        EXPLICIT_UNION = True  # NOTE: these flags override defaults on sqlglot.generator.Generator
483        INTERVAL_ALLOWS_PLURAL_FORM = False
484        JOIN_HINTS = False  # BigQuery output omits join hints
485        QUERY_HINTS = False  # ...and query hints
486        TABLE_HINTS = False  # ...and table hints
487        LIMIT_FETCH = "LIMIT"  # presumably selects LIMIT over FETCH — semantics live in the base Generator
488        RENAME_TABLE_WITH_DB = False
489        NVL2_SUPPORTED = False
490        UNNEST_WITH_ORDINALITY = False
491        COLLATE_IS_FUNC = True  # COLLATE renders in function-call style
492        LIMIT_ONLY_LITERALS = True  # only literal LIMIT arguments are accepted
493
494        TRANSFORMS = {  # expression-class -> SQL renderer overrides for BigQuery
495            **generator.Generator.TRANSFORMS,
496            exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),
497            exp.ArgMax: arg_max_or_min_no_count("MAX_BY"),
498            exp.ArgMin: arg_max_or_min_no_count("MIN_BY"),
499            exp.ArrayContains: _array_contains_sql,
500            exp.ArraySize: rename_func("ARRAY_LENGTH"),
501            exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
502            exp.CollateProperty: lambda self, e: f"DEFAULT COLLATE {self.sql(e, 'this')}"
503            if e.args.get("default")
504            else f"COLLATE {self.sql(e, 'this')}",
505            exp.Create: _create_sql,
506            exp.CTE: transforms.preprocess([_pushdown_cte_column_names]),
507            exp.DateAdd: date_add_interval_sql("DATE", "ADD"),
508            exp.DateDiff: lambda self, e: f"DATE_DIFF({self.sql(e, 'this')}, {self.sql(e, 'expression')}, {self.sql(e.args.get('unit', 'DAY'))})",
509            exp.DateFromParts: rename_func("DATE"),
510            exp.DateStrToDate: datestrtodate_sql,
511            exp.DateSub: date_add_interval_sql("DATE", "SUB"),
512            exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
513            exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
514            exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, e.text("unit")),
515            exp.GenerateSeries: rename_func("GENERATE_ARRAY"),
516            exp.GroupConcat: rename_func("STRING_AGG"),
517            exp.Hex: rename_func("TO_HEX"),
518            exp.If: if_sql(false_value="NULL"),  # IF(...) with NULL as the implicit false branch
519            exp.ILike: no_ilike_sql,
520            exp.IntDiv: rename_func("DIV"),
521            exp.JSONFormat: rename_func("TO_JSON_STRING"),
522            exp.JSONKeyValue: json_keyvalue_comma_sql,
523            exp.Max: max_or_greatest,
524            exp.MD5: lambda self, e: self.func("TO_HEX", self.func("MD5", e.this)),  # MD5 returns bytes; wrap in TO_HEX
525            exp.MD5Digest: rename_func("MD5"),
526            exp.Min: min_or_least,
527            exp.PartitionedByProperty: lambda self, e: f"PARTITION BY {self.sql(e, 'this')}",
528            exp.RegexpExtract: lambda self, e: self.func(
529                "REGEXP_EXTRACT",
530                e.this,
531                e.expression,
532                e.args.get("position"),
533                e.args.get("occurrence"),
534            ),
535            exp.RegexpReplace: regexp_replace_sql,
536            exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
537            exp.ReturnsProperty: _returnsproperty_sql,
538            exp.Select: transforms.preprocess(  # rewrite passes run on each SELECT before generation
539                [
540                    transforms.explode_to_unnest(),
541                    _unqualify_unnest,
542                    transforms.eliminate_distinct_on,
543                    _alias_ordered_group,
544                    transforms.eliminate_semi_and_anti_joins,
545                ]
546            ),
547            exp.SHA2: lambda self, e: self.func(  # SHA2 splits into SHA256/SHA512 by bit length
548                f"SHA256" if e.text("length") == "256" else "SHA512", e.this
549            ),
550            exp.StabilityProperty: lambda self, e: f"DETERMINISTIC"
551            if e.name == "IMMUTABLE"
552            else "NOT DETERMINISTIC",
553            exp.StrToDate: lambda self, e: f"PARSE_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
554            exp.StrToTime: lambda self, e: self.func(
555                "PARSE_TIMESTAMP", self.format_time(e), e.this, e.args.get("zone")
556            ),
557            exp.TimeAdd: date_add_interval_sql("TIME", "ADD"),
558            exp.TimeSub: date_add_interval_sql("TIME", "SUB"),
559            exp.TimestampAdd: date_add_interval_sql("TIMESTAMP", "ADD"),
560            exp.TimestampSub: date_add_interval_sql("TIMESTAMP", "SUB"),
561            exp.TimeStrToTime: timestrtotime_sql,
562            exp.TimeToStr: lambda self, e: f"FORMAT_DATE({self.format_time(e)}, {self.sql(e, 'this')})",
563            exp.Trim: lambda self, e: self.func(f"TRIM", e.this, e.expression),
564            exp.TsOrDsAdd: _ts_or_ds_add_sql,
565            exp.TsOrDsDiff: _ts_or_ds_diff_sql,
566            exp.TsOrDsToDate: ts_or_ds_to_date_sql("bigquery"),
567            exp.Unhex: rename_func("FROM_HEX"),
568            exp.UnixToTime: _unix_to_time_sql,
569            exp.Values: _derived_table_values_to_unnest,
570            exp.VariancePop: rename_func("VAR_POP"),
571        }
572
573        TYPE_MAPPING = {  # sqlglot data type -> BigQuery type-name overrides
574            **generator.Generator.TYPE_MAPPING,
575            exp.DataType.Type.BIGDECIMAL: "BIGNUMERIC",
576            exp.DataType.Type.BIGINT: "INT64",
577            exp.DataType.Type.BINARY: "BYTES",
578            exp.DataType.Type.BOOLEAN: "BOOL",
579            exp.DataType.Type.CHAR: "STRING",
580            exp.DataType.Type.DECIMAL: "NUMERIC",
581            exp.DataType.Type.DOUBLE: "FLOAT64",
582            exp.DataType.Type.FLOAT: "FLOAT64",
583            exp.DataType.Type.INT: "INT64",
584            exp.DataType.Type.NCHAR: "STRING",
585            exp.DataType.Type.NVARCHAR: "STRING",
586            exp.DataType.Type.SMALLINT: "INT64",
587            exp.DataType.Type.TEXT: "STRING",
588            exp.DataType.Type.TIMESTAMP: "DATETIME",  # note: TIMESTAMP -> DATETIME, while TZ/LTZ variants below -> TIMESTAMP
589            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
590            exp.DataType.Type.TIMESTAMPLTZ: "TIMESTAMP",
591            exp.DataType.Type.TINYINT: "INT64",
592            exp.DataType.Type.VARBINARY: "BYTES",
593            exp.DataType.Type.VARCHAR: "STRING",
594            exp.DataType.Type.VARIANT: "ANY TYPE",
595        }
596
597        PROPERTIES_LOCATION = {  # placement of DDL properties in generated statements
598            **generator.Generator.PROPERTIES_LOCATION,
599            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,  # PARTITION BY follows the schema
600            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,  # marked unsupported for this dialect
601        }
602
603        # from: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#reserved_keywords
604        RESERVED_KEYWORDS = {  # extends the base set with BigQuery's reserved words (see link above)
605            *generator.Generator.RESERVED_KEYWORDS,
606            "all",
607            "and",
608            "any",
609            "array",
610            "as",
611            "asc",
612            "assert_rows_modified",
613            "at",
614            "between",
615            "by",
616            "case",
617            "cast",
618            "collate",
619            "contains",
620            "create",
621            "cross",
622            "cube",
623            "current",
624            "default",
625            "define",
626            "desc",
627            "distinct",
628            "else",
629            "end",
630            "enum",
631            "escape",
632            "except",
633            "exclude",
634            "exists",
635            "extract",
636            "false",
637            "fetch",
638            "following",
639            "for",
640            "from",
641            "full",
642            "group",
643            "grouping",
644            "groups",
645            "hash",
646            "having",
647            "if",
648            "ignore",
649            "in",
650            "inner",
651            "intersect",
652            "interval",
653            "into",
654            "is",
655            "join",
656            "lateral",
657            "left",
658            "like",
659            "limit",
660            "lookup",
661            "merge",
662            "natural",
663            "new",
664            "no",
665            "not",
666            "null",
667            "nulls",
668            "of",
669            "on",
670            "or",
671            "order",
672            "outer",
673            "over",
674            "partition",
675            "preceding",
676            "proto",
677            "qualify",
678            "range",
679            "recursive",
680            "respect",
681            "right",
682            "rollup",
683            "rows",
684            "select",
685            "set",
686            "some",
687            "struct",
688            "tablesample",
689            "then",
690            "to",
691            "treat",
692            "true",
693            "unbounded",
694            "union",
695            "unnest",
696            "using",
697            "when",
698            "where",
699            "window",
700            "with",
701            "within",
702        }
703
704        def eq_sql(self, expression: exp.EQ) -> str:
705            # Operands of = cannot be NULL in BigQuery
706            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
707                return "NULL"
708
709            return self.binary(expression, "=")
710
711        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
712            parent = expression.parent
713
714            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
715            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
716            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
717                return self.func(
718                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
719                )
720
721            return super().attimezone_sql(expression)
722
723        def trycast_sql(self, expression: exp.TryCast) -> str:
724            return self.cast_sql(expression, safe_prefix="SAFE_")
725
726        def cte_sql(self, expression: exp.CTE) -> str:
727            if expression.alias_column_names:
728                self.unsupported("Column names in CTE definition are not supported.")
729            return super().cte_sql(expression)
730
731        def array_sql(self, expression: exp.Array) -> str:
732            first_arg = seq_get(expression.expressions, 0)
733            if isinstance(first_arg, exp.Subqueryable):
734                return f"ARRAY{self.wrap(self.sql(first_arg))}"
735
736            return inline_array_sql(self, expression)
737
738        def transaction_sql(self, *_) -> str:
739            return "BEGIN TRANSACTION"
740
741        def commit_sql(self, *_) -> str:
742            return "COMMIT TRANSACTION"
743
744        def rollback_sql(self, *_) -> str:
745            return "ROLLBACK TRANSACTION"
746
747        def in_unnest_op(self, expression: exp.Unnest) -> str:
748            return self.sql(expression)
749
750        def except_op(self, expression: exp.Except) -> str:
751            if not expression.args.get("distinct", False):
752                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
753            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
754
755        def intersect_op(self, expression: exp.Intersect) -> str:
756            if not expression.args.get("distinct", False):
757                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
758            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
759
760        def with_properties(self, properties: exp.Properties) -> str:
761            return self.properties(properties, prefix=self.seg("OPTIONS"))
762
763        def version_sql(self, expression: exp.Version) -> str:
764            if expression.name == "TIMESTAMP":
765                expression.set("this", "SYSTEM_TIME")
766            return super().version_sql(expression)

Generator converts a given syntax tree to the corresponding SQL string.

Arguments:
  • pretty: Whether or not to format the produced SQL string. Default: False.
  • identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
  • normalize: Whether or not to normalize identifiers to lowercase. Default: False.
  • pad: Determines the pad size in a formatted string. Default: 2.
  • indent: Determines the indentation size in a formatted string. Default: 2.
  • normalize_functions: Whether or not to normalize all function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
  • unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default: ErrorLevel.WARN.
  • max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3.
  • leading_comma: Determines whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False.
  • max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
  • comments: Whether or not to preserve comments in the output SQL code. Default: True
EXPLICIT_UNION = True
INTERVAL_ALLOWS_PLURAL_FORM = False
JOIN_HINTS = False
QUERY_HINTS = False
TABLE_HINTS = False
LIMIT_FETCH = 'LIMIT'
RENAME_TABLE_WITH_DB = False
NVL2_SUPPORTED = False
UNNEST_WITH_ORDINALITY = False
COLLATE_IS_FUNC = True
LIMIT_ONLY_LITERALS = True
TRANSFORMS = {<class 'sqlglot.expressions.DateAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.CaseSpecificColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CharacterSetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CheckColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ClusteredColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CollateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.CommentColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DateFormatColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.DefaultColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.EncodeColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ExternalProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.HeapProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InlineLengthColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.InputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.IntervalSpan'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LanguageProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LocationProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.LogProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.MaterializedProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.NonClusteredColumnConstraint'>: 
<function Generator.<lambda>>, <class 'sqlglot.expressions.NotForReplicationColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnCommitProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OnUpdateColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.OutputModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.PathColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.RemoteWithConnectionModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ReturnsProperty'>: <function _returnsproperty_sql>, <class 'sqlglot.expressions.SampleProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SetProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SettingsProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.StabilityProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TemporaryProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ToTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransientProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TransformModelProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.TitleColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.UppercaseColumnConstraint'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VarMap'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.VolatileProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <function Generator.<lambda>>, <class 'sqlglot.expressions.ApproxDistinct'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.ArgMax'>: <function 
arg_max_or_min_no_count.<locals>._arg_max_or_min_sql>, <class 'sqlglot.expressions.ArgMin'>: <function arg_max_or_min_no_count.<locals>._arg_max_or_min_sql>, <class 'sqlglot.expressions.ArrayContains'>: <function _array_contains_sql>, <class 'sqlglot.expressions.ArraySize'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Cast'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.CollateProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.Create'>: <function _create_sql>, <class 'sqlglot.expressions.CTE'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.DateDiff'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.DateFromParts'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.DateStrToDate'>: <function datestrtodate_sql>, <class 'sqlglot.expressions.DateSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DatetimeSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.DateTrunc'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.GenerateSeries'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.GroupConcat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Hex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.If'>: <function if_sql.<locals>._if_sql>, <class 'sqlglot.expressions.ILike'>: <function no_ilike_sql>, <class 'sqlglot.expressions.IntDiv'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONFormat'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.JSONKeyValue'>: <function json_keyvalue_comma_sql>, <class 'sqlglot.expressions.Max'>: <function max_or_greatest>, <class 'sqlglot.expressions.MD5'>: <function BigQuery.Generator.<lambda>>, <class 
'sqlglot.expressions.MD5Digest'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Min'>: <function min_or_least>, <class 'sqlglot.expressions.PartitionedByProperty'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpExtract'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.RegexpReplace'>: <function regexp_replace_sql>, <class 'sqlglot.expressions.RegexpLike'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.Select'>: <function preprocess.<locals>._to_sql>, <class 'sqlglot.expressions.SHA2'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToDate'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.StrToTime'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TimeAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimeSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampAdd'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimestampSub'>: <function date_add_interval_sql.<locals>.func>, <class 'sqlglot.expressions.TimeStrToTime'>: <function timestrtotime_sql>, <class 'sqlglot.expressions.TimeToStr'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.Trim'>: <function BigQuery.Generator.<lambda>>, <class 'sqlglot.expressions.TsOrDsAdd'>: <function _ts_or_ds_add_sql>, <class 'sqlglot.expressions.TsOrDsDiff'>: <function _ts_or_ds_diff_sql>, <class 'sqlglot.expressions.TsOrDsToDate'>: <function ts_or_ds_to_date_sql.<locals>._ts_or_ds_to_date_sql>, <class 'sqlglot.expressions.Unhex'>: <function rename_func.<locals>.<lambda>>, <class 'sqlglot.expressions.UnixToTime'>: <function _unix_to_time_sql>, <class 'sqlglot.expressions.Values'>: <function _derived_table_values_to_unnest>, <class 'sqlglot.expressions.VariancePop'>: <function rename_func.<locals>.<lambda>>}
TYPE_MAPPING = {<Type.NCHAR: 'NCHAR'>: 'STRING', <Type.NVARCHAR: 'NVARCHAR'>: 'STRING', <Type.MEDIUMTEXT: 'MEDIUMTEXT'>: 'TEXT', <Type.LONGTEXT: 'LONGTEXT'>: 'TEXT', <Type.TINYTEXT: 'TINYTEXT'>: 'TEXT', <Type.MEDIUMBLOB: 'MEDIUMBLOB'>: 'BLOB', <Type.LONGBLOB: 'LONGBLOB'>: 'BLOB', <Type.TINYBLOB: 'TINYBLOB'>: 'BLOB', <Type.INET: 'INET'>: 'INET', <Type.BIGDECIMAL: 'BIGDECIMAL'>: 'BIGNUMERIC', <Type.BIGINT: 'BIGINT'>: 'INT64', <Type.BINARY: 'BINARY'>: 'BYTES', <Type.BOOLEAN: 'BOOLEAN'>: 'BOOL', <Type.CHAR: 'CHAR'>: 'STRING', <Type.DECIMAL: 'DECIMAL'>: 'NUMERIC', <Type.DOUBLE: 'DOUBLE'>: 'FLOAT64', <Type.FLOAT: 'FLOAT'>: 'FLOAT64', <Type.INT: 'INT'>: 'INT64', <Type.SMALLINT: 'SMALLINT'>: 'INT64', <Type.TEXT: 'TEXT'>: 'STRING', <Type.TIMESTAMP: 'TIMESTAMP'>: 'DATETIME', <Type.TIMESTAMPTZ: 'TIMESTAMPTZ'>: 'TIMESTAMP', <Type.TIMESTAMPLTZ: 'TIMESTAMPLTZ'>: 'TIMESTAMP', <Type.TINYINT: 'TINYINT'>: 'INT64', <Type.VARBINARY: 'VARBINARY'>: 'BYTES', <Type.VARCHAR: 'VARCHAR'>: 'STRING', <Type.VARIANT: 'VARIANT'>: 'ANY TYPE'}
PROPERTIES_LOCATION = {<class 'sqlglot.expressions.AlgorithmProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.AutoIncrementProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.BlockCompressionProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CharacterSetProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ChecksumProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.CollateProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.CopyGrantsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Cluster'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ClusteredByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DataBlocksizeProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.DefinerProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.DictRange'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DictProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.DistStyleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.EngineProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExecuteAsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ExternalProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.FallbackProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.FileFormatProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.FreespaceProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.HeapProperty'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.InputModelProperty'>: <Location.POST_SCHEMA: 
'POST_SCHEMA'>, <class 'sqlglot.expressions.IsolatedLoadingProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.JournalProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.LanguageProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LikeProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LocationProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.LockingProperty'>: <Location.POST_ALIAS: 'POST_ALIAS'>, <class 'sqlglot.expressions.LogProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.MaterializedProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.MergeBlockRatioProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.NoPrimaryIndexProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.OnProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.OnCommitProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.Order'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.OutputModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedByProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PartitionedOfProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.PrimaryKey'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Property'>: <Location.POST_WITH: 'POST_WITH'>, <class 'sqlglot.expressions.RemoteWithConnectionModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.ReturnsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatDelimitedProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.RowFormatSerdeProperty'>: 
<Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SampleProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SchemaCommentProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SerdeProperties'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.Set'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SettingsProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SetProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.SortKeyProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.SqlSecurityProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.StabilityProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TemporaryProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.ToTableProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.TransientProperty'>: <Location.POST_CREATE: 'POST_CREATE'>, <class 'sqlglot.expressions.TransformModelProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.MergeTreeTTL'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>, <class 'sqlglot.expressions.VolatileProperty'>: <Location.UNSUPPORTED: 'UNSUPPORTED'>, <class 'sqlglot.expressions.WithDataProperty'>: <Location.POST_EXPRESSION: 'POST_EXPRESSION'>, <class 'sqlglot.expressions.WithJournalTableProperty'>: <Location.POST_NAME: 'POST_NAME'>, <class 'sqlglot.expressions.WithSystemVersioningProperty'>: <Location.POST_SCHEMA: 'POST_SCHEMA'>}
RESERVED_KEYWORDS = {'grouping', 'recursive', 'tablesample', 'not', 'if', 'inner', 'on', 'select', 'unbounded', 'collate', 'create', 'some', 'following', 'cast', 'when', 'using', 'merge', 'over', 'partition', 'true', 'by', 'ignore', 'default', 'for', 'in', 'new', 'extract', 'desc', 'groups', 'struct', 'full', 'fetch', 'else', 'end', 'window', 'hash', 'set', 'of', 'cross', 'is', 'exclude', 'respect', 'lateral', 'right', 'all', 'assert_rows_modified', 'range', 'null', 'between', 'join', 'enum', 'left', 'rollup', 'where', 'except', 'into', 'no', 'treat', 'cube', 'within', 'escape', 'exists', 'from', 'array', 'like', 'and', 'false', 'interval', 'limit', 'asc', 'define', 'distinct', 'any', 'at', 'order', 'to', 'or', 'preceding', 'case', 'rows', 'group', 'qualify', 'union', 'intersect', 'unnest', 'lookup', 'current', 'as', 'nulls', 'contains', 'outer', 'natural', 'proto', 'then', 'with', 'having'}
def eq_sql(self, expression: sqlglot.expressions.EQ) -> str:
704        def eq_sql(self, expression: exp.EQ) -> str:
705            # Operands of = cannot be NULL in BigQuery
706            if isinstance(expression.left, exp.Null) or isinstance(expression.right, exp.Null):
707                return "NULL"
708
709            return self.binary(expression, "=")
def attimezone_sql(self, expression: sqlglot.expressions.AtTimeZone) -> str:
711        def attimezone_sql(self, expression: exp.AtTimeZone) -> str:
712            parent = expression.parent
713
714            # BigQuery allows CAST(.. AS {STRING|TIMESTAMP} [FORMAT <fmt> [AT TIME ZONE <tz>]]).
715            # Only the TIMESTAMP one should use the below conversion, when AT TIME ZONE is included.
716            if not isinstance(parent, exp.Cast) or not parent.to.is_type("text"):
717                return self.func(
718                    "TIMESTAMP", self.func("DATETIME", expression.this, expression.args.get("zone"))
719                )
720
721            return super().attimezone_sql(expression)
def trycast_sql(self, expression: sqlglot.expressions.TryCast) -> str:
723        def trycast_sql(self, expression: exp.TryCast) -> str:
724            return self.cast_sql(expression, safe_prefix="SAFE_")
def cte_sql(self, expression: sqlglot.expressions.CTE) -> str:
726        def cte_sql(self, expression: exp.CTE) -> str:
727            if expression.alias_column_names:
728                self.unsupported("Column names in CTE definition are not supported.")
729            return super().cte_sql(expression)
def array_sql(self, expression: sqlglot.expressions.Array) -> str:
731        def array_sql(self, expression: exp.Array) -> str:
732            first_arg = seq_get(expression.expressions, 0)
733            if isinstance(first_arg, exp.Subqueryable):
734                return f"ARRAY{self.wrap(self.sql(first_arg))}"
735
736            return inline_array_sql(self, expression)
def transaction_sql(self, *_) -> str:
738        def transaction_sql(self, *_) -> str:
739            return "BEGIN TRANSACTION"
def commit_sql(self, *_) -> str:
741        def commit_sql(self, *_) -> str:
742            return "COMMIT TRANSACTION"
def rollback_sql(self, *_) -> str:
744        def rollback_sql(self, *_) -> str:
745            return "ROLLBACK TRANSACTION"
def in_unnest_op(self, expression: sqlglot.expressions.Unnest) -> str:
747        def in_unnest_op(self, expression: exp.Unnest) -> str:
748            return self.sql(expression)
def except_op(self, expression: sqlglot.expressions.Except) -> str:
750        def except_op(self, expression: exp.Except) -> str:
751            if not expression.args.get("distinct", False):
752                self.unsupported("EXCEPT without DISTINCT is not supported in BigQuery")
753            return f"EXCEPT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
def intersect_op(self, expression: sqlglot.expressions.Intersect) -> str:
755        def intersect_op(self, expression: exp.Intersect) -> str:
756            if not expression.args.get("distinct", False):
757                self.unsupported("INTERSECT without DISTINCT is not supported in BigQuery")
758            return f"INTERSECT{' DISTINCT' if expression.args.get('distinct') else ' ALL'}"
def with_properties(self, properties: sqlglot.expressions.Properties) -> str:
760        def with_properties(self, properties: exp.Properties) -> str:
761            return self.properties(properties, prefix=self.seg("OPTIONS"))
def version_sql(self, expression: sqlglot.expressions.Version) -> str:
763        def version_sql(self, expression: exp.Version) -> str:
764            if expression.name == "TIMESTAMP":
765                expression.set("this", "SYSTEM_TIME")
766            return super().version_sql(expression)
LOG_BASE_FIRST = False
INVERSE_TIME_MAPPING: Dict[str, str] = {'%m/%d/%y': '%D'}
INVERSE_TIME_TRIE: Dict = {'%': {'m': {'/': {'%': {'d': {'/': {'%': {'y': {0: True}}}}}}}}}
INVERSE_ESCAPE_SEQUENCES: Dict[str, str] = {'\x07': '\\a', '\x08': '\\b', '\x0c': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', '\x0b': '\\v'}
UNNEST_COLUMN_ONLY = True
NORMALIZE_FUNCTIONS: bool | str = False
@classmethod
def can_identify(text: str, identify: str | bool = 'safe') -> bool:
291    @classmethod
292    def can_identify(cls, text: str, identify: str | bool = "safe") -> bool:
293        """Checks if text can be identified given an identify option.
294
295        Args:
296            text: The text to check.
297            identify:
298                "always" or `True`: Always returns true.
299                "safe": True if the identifier is case-insensitive.
300
301        Returns:
302            Whether or not the given text can be identified.
303        """
304        if identify is True or identify == "always":
305            return True
306
307        if identify == "safe":
308            return not cls.case_sensitive(text)
309
310        return False

Checks if text can be identified given an identify option.

Arguments:
  • text: The text to check.
  • identify: "always" or True: Always returns true. "safe": True if the identifier is case-insensitive.
Returns: Whether or not the given text can be identified.

QUOTE_START = "'"
QUOTE_END = "'"
IDENTIFIER_START = '`'
IDENTIFIER_END = '`'
TOKENIZER_CLASS = <class 'BigQuery.Tokenizer'>
BIT_START: Optional[str] = None
BIT_END: Optional[str] = None
HEX_START: Optional[str] = '0x'
HEX_END: Optional[str] = ''
BYTE_START: Optional[str] = "b'"
BYTE_END: Optional[str] = "'"
Inherited Members
sqlglot.generator.Generator
Generator
NULL_ORDERING_SUPPORTED
LOCKING_READS_SUPPORTED
WRAP_DERIVED_VALUES
CREATE_FUNCTION_RETURN_AS
MATCHED_BY_SOURCE
SINGLE_STRING_INTERVAL
TABLESAMPLE_WITH_METHOD
TABLESAMPLE_SIZE_IS_PERCENT
GROUPINGS_SEP
INDEX_ON
QUERY_HINT_SEP
IS_BOOL_ALLOWED
DUPLICATE_KEY_UPDATE_WITH_SET
LIMIT_IS_TOP
RETURNING_END
COLUMN_JOIN_MARKS_SUPPORTED
EXTRACT_ALLOWS_QUOTES
TZ_TO_WITH_TIME_ZONE
SELECT_KINDS
VALUES_AS_TABLE
ALTER_TABLE_ADD_COLUMN_KEYWORD
AGGREGATE_FILTER_SUPPORTED
SEMI_ANTI_JOIN_WITH_SIDE
SUPPORTS_PARAMETERS
COMPUTED_COLUMN_WITH_TYPE
SUPPORTS_TABLE_COPY
TABLESAMPLE_REQUIRES_PARENS
DATA_TYPE_SPECIFIERS_ALLOWED
ENSURE_BOOLS
CTE_RECURSIVE_KEYWORD_REQUIRED
TYPED_DIVISION
SAFE_DIVISION
STAR_MAPPING
TIME_PART_SINGULARS
TOKEN_MAPPING
STRUCT_DELIMITER
PARAMETER_TOKEN
WITH_SEPARATED_COMMENTS
EXCLUDE_COMMENTS
UNWRAPPED_INTERVAL_VALUES
EXPRESSIONS_WITHOUT_NESTED_CTES
KEY_VALUE_DEFINITONS
SENTINEL_LINE_BREAK
INDEX_OFFSET
ALIAS_POST_TABLESAMPLE
IDENTIFIERS_CAN_START_WITH_DIGIT
STRICT_STRING_CONCAT
NULL_ORDERING
pretty
identify
normalize
pad
unsupported_level
max_unsupported
leading_comma
max_text_width
comments
normalize_functions
unsupported_messages
generate
preprocess
unsupported
sep
seg
pad_comment
maybe_comment
wrap
no_identify
normalize_func
indent
sql
uncache_sql
cache_sql
characterset_sql
column_sql
columnposition_sql
columndef_sql
columnconstraint_sql
computedcolumnconstraint_sql
autoincrementcolumnconstraint_sql
compresscolumnconstraint_sql
generatedasidentitycolumnconstraint_sql
generatedasrowcolumnconstraint_sql
periodforsystemtimeconstraint_sql
notnullcolumnconstraint_sql
primarykeycolumnconstraint_sql
uniquecolumnconstraint_sql
createable_sql
create_sql
clone_sql
describe_sql
prepend_ctes
with_sql
tablealias_sql
bitstring_sql
hexstring_sql
bytestring_sql
rawstring_sql
datatypeparam_sql
datatype_sql
directory_sql
delete_sql
drop_sql
except_sql
fetch_sql
filter_sql
hint_sql
index_sql
identifier_sql
inputoutputformat_sql
national_sql
partition_sql
properties_sql
root_properties
properties
locate_properties
property_name
property_sql
likeproperty_sql
fallbackproperty_sql
journalproperty_sql
freespaceproperty_sql
checksumproperty_sql
mergeblockratioproperty_sql
datablocksizeproperty_sql
blockcompressionproperty_sql
isolatedloadingproperty_sql
partitionboundspec_sql
partitionedofproperty_sql
lockingproperty_sql
withdataproperty_sql
withsystemversioningproperty_sql
insert_sql
intersect_sql
introducer_sql
kill_sql
pseudotype_sql
objectidentifier_sql
onconflict_sql
returning_sql
rowformatdelimitedproperty_sql
withtablehint_sql
indextablehint_sql
table_sql
tablesample_sql
pivot_sql
tuple_sql
update_sql
values_sql
var_sql
into_sql
from_sql
group_sql
having_sql
connect_sql
prior_sql
join_sql
lambda_sql
lateral_sql
limit_sql
offset_sql
setitem_sql
set_sql
pragma_sql
lock_sql
literal_sql
escape_str
loaddata_sql
null_sql
boolean_sql
order_sql
cluster_sql
distribute_sql
sort_sql
ordered_sql
matchrecognize_sql
query_modifiers
offset_limit_modifiers
after_having_modifiers
after_limit_modifiers
select_sql
schema_sql
schema_columns_sql
star_sql
parameter_sql
sessionparameter_sql
placeholder_sql
subquery_sql
qualify_sql
union_sql
union_op
unnest_sql
where_sql
window_sql
partition_by_sql
windowspec_sql
withingroup_sql
between_sql
bracket_sql
safebracket_sql
all_sql
any_sql
exists_sql
case_sql
constraint_sql
nextvaluefor_sql
extract_sql
trim_sql
safeconcat_sql
check_sql
foreignkey_sql
primarykey_sql
if_sql
matchagainst_sql
jsonkeyvalue_sql
formatjson_sql
jsonobject_sql
jsonarray_sql
jsonarrayagg_sql
jsoncolumndef_sql
jsonschema_sql
jsontable_sql
openjsoncolumndef_sql
openjson_sql
in_sql
interval_sql
return_sql
reference_sql
anonymous_sql
paren_sql
neg_sql
not_sql
alias_sql
aliases_sql
add_sql
and_sql
xor_sql
connector_sql
bitwiseand_sql
bitwiseleftshift_sql
bitwisenot_sql
bitwiseor_sql
bitwiserightshift_sql
bitwisexor_sql
cast_sql
currentdate_sql
collate_sql
command_sql
comment_sql
mergetreettlaction_sql
mergetreettl_sql
altercolumn_sql
renametable_sql
altertable_sql
add_column_sql
droppartition_sql
addconstraint_sql
distinct_sql
ignorenulls_sql
respectnulls_sql
intdiv_sql
dpipe_sql
safedpipe_sql
div_sql
overlaps_sql
distance_sql
dot_sql
propertyeq_sql
escape_sql
glob_sql
gt_sql
gte_sql
ilike_sql
ilikeany_sql
is_sql
like_sql
likeany_sql
similarto_sql
lt_sql
lte_sql
mod_sql
mul_sql
neq_sql
nullsafeeq_sql
nullsafeneq_sql
or_sql
slice_sql
sub_sql
log_sql
use_sql
binary
function_fallback_sql
func
format_args
text_width
format_time
expressions
op_expressions
naked_property
set_operation
tag_sql
token_sql
userdefinedfunction_sql
joinhint_sql
kwarg_sql
when_sql
merge_sql
tochar_sql
dictproperty_sql
dictrange_sql
dictsubproperty_sql
oncluster_sql
clusteredbyproperty_sql
anyvalue_sql
querytransform_sql
indexconstraintoption_sql
indexcolumnconstraint_sql
nvl2_sql
comprehension_sql
columnprefix_sql
opclass_sql
predict_sql
forin_sql
refresh_sql
operator_sql