sqlglot.dialects.presto
from __future__ import annotations

import typing as t

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    date_trunc_to_time,
    format_time_lambda,
    if_sql,
    left_to_substring_sql,
    no_ilike_sql,
    no_pivot_sql,
    no_safe_divide_sql,
    rename_func,
    right_to_substring_sql,
    struct_extract_sql,
    timestamptrunc_sql,
    timestrtotime_sql,
)
from sqlglot.dialects.mysql import MySQL
from sqlglot.errors import UnsupportedError
from sqlglot.helper import seq_get
from sqlglot.tokens import TokenType


def _approx_distinct_sql(self: generator.Generator, expression: exp.ApproxDistinct) -> str:
    accuracy = expression.args.get("accuracy")
    accuracy = ", " + self.sql(accuracy) if accuracy else ""
    return f"APPROX_DISTINCT({self.sql(expression, 'this')}{accuracy})"


def _datatype_sql(self: generator.Generator, expression: exp.DataType) -> str:
    sql = self.datatype_sql(expression)
    if expression.is_type("timestamptz"):
        sql = f"{sql} WITH TIME ZONE"
    return sql


def _explode_to_unnest_sql(self: generator.Generator, expression: exp.Lateral) -> str:
    if isinstance(expression.this, (exp.Explode, exp.Posexplode)):
        return self.sql(
            exp.Join(
                this=exp.Unnest(
                    expressions=[expression.this.this],
                    alias=expression.args.get("alias"),
                    ordinality=isinstance(expression.this, exp.Posexplode),
                ),
                kind="cross",
            )
        )
    return self.lateral_sql(expression)


def _initcap_sql(self: generator.Generator, expression: exp.Initcap) -> str:
    regex = r"(\w)(\w*)"
    return f"REGEXP_REPLACE({self.sql(expression, 'this')}, '{regex}', x -> UPPER(x[1]) || LOWER(x[2]))"


def _decode_sql(self: generator.Generator, expression: exp.Decode) -> str:
    _ensure_utf8(expression.args["charset"])
    return self.func("FROM_UTF8", expression.this, expression.args.get("replace"))


def _encode_sql(self: generator.Generator, expression: exp.Encode) -> str:
    _ensure_utf8(expression.args["charset"])
    return f"TO_UTF8({self.sql(expression, 'this')})"


def _no_sort_array(self: generator.Generator, expression: exp.SortArray) -> str:
    if expression.args.get("asc") == exp.false():
        comparator = "(a, b) -> CASE WHEN a < b THEN 1 WHEN a > b THEN -1 ELSE 0 END"
    else:
        comparator = None
    return self.func("ARRAY_SORT", expression.this, comparator)


def _schema_sql(self: generator.Generator, expression: exp.Schema) -> str:
    if isinstance(expression.parent, exp.Property):
        columns = ", ".join(f"'{c.name}'" for c in expression.expressions)
        return f"ARRAY[{columns}]"

    if expression.parent:
        for schema in expression.parent.find_all(exp.Schema):
            if isinstance(schema.parent, exp.Property):
                expression = expression.copy()
                expression.expressions.extend(schema.expressions)

    return self.schema_sql(expression)


def _quantile_sql(self: generator.Generator, expression: exp.Quantile) -> str:
    self.unsupported("Presto does not support exact quantiles")
    return f"APPROX_PERCENTILE({self.sql(expression, 'this')}, {self.sql(expression, 'quantile')})"


def _str_to_time_sql(
    self: generator.Generator, expression: exp.StrToDate | exp.StrToTime | exp.TsOrDsToDate
) -> str:
    return f"DATE_PARSE({self.sql(expression, 'this')}, {self.format_time(expression)})"


def _ts_or_ds_to_date_sql(self: generator.Generator, expression: exp.TsOrDsToDate) -> str:
    time_format = self.format_time(expression)
    if time_format and time_format not in (Presto.TIME_FORMAT, Presto.DATE_FORMAT):
        return f"CAST({_str_to_time_sql(self, expression)} AS DATE)"
    return f"CAST(SUBSTR(CAST({self.sql(expression, 'this')} AS VARCHAR), 1, 10) AS DATE)"


def _ts_or_ds_add_sql(self: generator.Generator, expression: exp.TsOrDsAdd) -> str:
    this = expression.this

    if not isinstance(this, exp.CurrentDate):
        this = self.func(
            "DATE_PARSE",
            self.func(
                "SUBSTR",
                this if this.is_string else exp.cast(this, "VARCHAR"),
                exp.Literal.number(1),
                exp.Literal.number(10),
            ),
            Presto.DATE_FORMAT,
        )

    return self.func(
        "DATE_ADD",
        exp.Literal.string(expression.text("unit") or "day"),
        expression.expression,
        this,
    )


def _ensure_utf8(charset: exp.Literal) -> None:
    if charset.name.lower() != "utf-8":
        raise UnsupportedError(f"Unsupported charset {charset}")


def _approx_percentile(args: t.List) -> exp.Expression:
    if len(args) == 4:
        return exp.ApproxQuantile(
            this=seq_get(args, 0),
            weight=seq_get(args, 1),
            quantile=seq_get(args, 2),
            accuracy=seq_get(args, 3),
        )
    if len(args) == 3:
        return exp.ApproxQuantile(
            this=seq_get(args, 0), quantile=seq_get(args, 1), accuracy=seq_get(args, 2)
        )
    return exp.ApproxQuantile.from_arg_list(args)


def _from_unixtime(args: t.List) -> exp.Expression:
    if len(args) == 3:
        return exp.UnixToTime(
            this=seq_get(args, 0),
            hours=seq_get(args, 1),
            minutes=seq_get(args, 2),
        )
    if len(args) == 2:
        return exp.UnixToTime(this=seq_get(args, 0), zone=seq_get(args, 1))

    return exp.UnixToTime.from_arg_list(args)


def _unnest_sequence(expression: exp.Expression) -> exp.Expression:
    if isinstance(expression, exp.Table):
        if isinstance(expression.this, exp.GenerateSeries):
            unnest = exp.Unnest(expressions=[expression.this])

            if expression.alias:
                return exp.alias_(unnest, alias="_u", table=[expression.alias], copy=False)
            return unnest
    return expression


class Presto(Dialect):
    INDEX_OFFSET = 1
    NULL_ORDERING = "nulls_are_last"
    TIME_FORMAT = MySQL.TIME_FORMAT
    TIME_MAPPING = MySQL.TIME_MAPPING
    STRICT_STRING_CONCAT = True

    class Tokenizer(tokens.Tokenizer):
        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "START": TokenType.BEGIN,
            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
            "ROW": TokenType.STRUCT,
        }

    class Parser(parser.Parser):
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            "APPROX_DISTINCT": exp.ApproxDistinct.from_arg_list,
            "APPROX_PERCENTILE": _approx_percentile,
            "CARDINALITY": exp.ArraySize.from_arg_list,
            "CONTAINS": exp.ArrayContains.from_arg_list,
            "DATE_ADD": lambda args: exp.DateAdd(
                this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0)
            ),
            "DATE_DIFF": lambda args: exp.DateDiff(
                this=seq_get(args, 2), expression=seq_get(args, 1), unit=seq_get(args, 0)
            ),
            "DATE_FORMAT": format_time_lambda(exp.TimeToStr, "presto"),
            "DATE_PARSE": format_time_lambda(exp.StrToTime, "presto"),
            "DATE_TRUNC": date_trunc_to_time,
            "FROM_HEX": exp.Unhex.from_arg_list,
            "FROM_UNIXTIME": _from_unixtime,
            "FROM_UTF8": lambda args: exp.Decode(
                this=seq_get(args, 0), replace=seq_get(args, 1), charset=exp.Literal.string("utf-8")
            ),
            "NOW": exp.CurrentTimestamp.from_arg_list,
            "SEQUENCE": exp.GenerateSeries.from_arg_list,
            "STRPOS": lambda args: exp.StrPosition(
                this=seq_get(args, 0), substr=seq_get(args, 1), instance=seq_get(args, 2)
            ),
            "TO_UNIXTIME": exp.TimeToUnix.from_arg_list,
            "TO_HEX": exp.Hex.from_arg_list,
            "TO_UTF8": lambda args: exp.Encode(
                this=seq_get(args, 0), charset=exp.Literal.string("utf-8")
            ),
        }
        FUNCTION_PARSERS = parser.Parser.FUNCTION_PARSERS.copy()
        FUNCTION_PARSERS.pop("TRIM")

    class Generator(generator.Generator):
        INTERVAL_ALLOWS_PLURAL_FORM = False
        JOIN_HINTS = False
        TABLE_HINTS = False
        IS_BOOL_ALLOWED = False
        STRUCT_DELIMITER = ("(", ")")

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.LocationProperty: exp.Properties.Location.UNSUPPORTED,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.FLOAT: "REAL",
            exp.DataType.Type.BINARY: "VARBINARY",
            exp.DataType.Type.TEXT: "VARCHAR",
            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
            exp.DataType.Type.STRUCT: "ROW",
        }

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.ApproxDistinct: _approx_distinct_sql,
            exp.ApproxQuantile: rename_func("APPROX_PERCENTILE"),
            exp.Array: lambda self, e: f"ARRAY[{self.expressions(e, flat=True)}]",
            exp.ArrayConcat: rename_func("CONCAT"),
            exp.ArrayContains: rename_func("CONTAINS"),
            exp.ArraySize: rename_func("CARDINALITY"),
            exp.BitwiseAnd: lambda self, e: f"BITWISE_AND({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseLeftShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_LEFT({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseNot: lambda self, e: f"BITWISE_NOT({self.sql(e, 'this')})",
            exp.BitwiseOr: lambda self, e: f"BITWISE_OR({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseRightShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_RIGHT({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseXor: lambda self, e: f"BITWISE_XOR({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.Cast: transforms.preprocess([transforms.epoch_cast_to_ts]),
            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
            exp.DataType: _datatype_sql,
            exp.DateAdd: lambda self, e: self.func(
                "DATE_ADD", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this
            ),
            exp.DateDiff: lambda self, e: self.func(
                "DATE_DIFF", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this
            ),
            exp.DateStrToDate: lambda self, e: f"CAST(DATE_PARSE({self.sql(e, 'this')}, {Presto.DATE_FORMAT}) AS DATE)",
            exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Presto.DATEINT_FORMAT}) AS INT)",
            exp.Decode: _decode_sql,
            exp.DiToDate: lambda self, e: f"CAST(DATE_PARSE(CAST({self.sql(e, 'this')} AS VARCHAR), {Presto.DATEINT_FORMAT}) AS DATE)",
            exp.Encode: _encode_sql,
            exp.FileFormatProperty: lambda self, e: f"FORMAT='{e.name.upper()}'",
            exp.Group: transforms.preprocess([transforms.unalias_group]),
            exp.Hex: rename_func("TO_HEX"),
            exp.If: if_sql,
            exp.ILike: no_ilike_sql,
            exp.Initcap: _initcap_sql,
            exp.Lateral: _explode_to_unnest_sql,
            exp.Left: left_to_substring_sql,
            exp.Levenshtein: rename_func("LEVENSHTEIN_DISTANCE"),
            exp.LogicalAnd: rename_func("BOOL_AND"),
            exp.LogicalOr: rename_func("BOOL_OR"),
            exp.Pivot: no_pivot_sql,
            exp.Quantile: _quantile_sql,
            exp.Right: right_to_substring_sql,
            exp.SafeDivide: no_safe_divide_sql,
            exp.Schema: _schema_sql,
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_qualify,
                    transforms.eliminate_distinct_on,
                    transforms.explode_to_unnest,
                ]
            ),
            exp.SortArray: _no_sort_array,
            exp.StrPosition: rename_func("STRPOS"),
            exp.StrToDate: lambda self, e: f"CAST({_str_to_time_sql(self, e)} AS DATE)",
            exp.StrToTime: _str_to_time_sql,
            exp.StrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {self.format_time(e)}))",
            exp.StructExtract: struct_extract_sql,
            exp.Table: transforms.preprocess([_unnest_sequence]),
            exp.TimestampTrunc: timestamptrunc_sql,
            exp.TimeStrToDate: timestrtotime_sql,
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeStrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {Presto.TIME_FORMAT}))",
            exp.TimeToStr: lambda self, e: f"DATE_FORMAT({self.sql(e, 'this')}, {self.format_time(e)})",
            exp.TimeToUnix: rename_func("TO_UNIXTIME"),
            exp.TryCast: transforms.preprocess([transforms.epoch_cast_to_ts]),
            exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS VARCHAR), '-', ''), 1, 8) AS INT)",
            exp.TsOrDsAdd: _ts_or_ds_add_sql,
            exp.TsOrDsToDate: _ts_or_ds_to_date_sql,
            exp.Unhex: rename_func("FROM_HEX"),
            exp.UnixToStr: lambda self, e: f"DATE_FORMAT(FROM_UNIXTIME({self.sql(e, 'this')}), {self.format_time(e)})",
            exp.UnixToTime: rename_func("FROM_UNIXTIME"),
            exp.UnixToTimeStr: lambda self, e: f"CAST(FROM_UNIXTIME({self.sql(e, 'this')}) AS VARCHAR)",
            exp.VariancePop: rename_func("VAR_POP"),
            exp.With: transforms.preprocess([transforms.add_recursive_cte_column_names]),
            exp.WithinGroup: transforms.preprocess(
                [transforms.remove_within_group_for_percentiles]
            ),
        }

        def interval_sql(self, expression: exp.Interval) -> str:
            unit = self.sql(expression, "unit")
            if expression.this and unit.lower().startswith("week"):
                return f"({expression.this.name} * INTERVAL '7' day)"
            return super().interval_sql(expression)

        def transaction_sql(self, expression: exp.Transaction) -> str:
            modes = expression.args.get("modes")
            modes = f" {', '.join(modes)}" if modes else ""
            return f"START TRANSACTION{modes}"

        def generateseries_sql(self, expression: exp.GenerateSeries) -> str:
            start = expression.args["start"]
            end = expression.args["end"]
            step = expression.args.get("step")

            if isinstance(start, exp.Cast):
                target_type = start.to
            elif isinstance(end, exp.Cast):
                target_type = end.to
            else:
                target_type = None

            if target_type and target_type.is_type("timestamp"):
                to = target_type.copy()

                if target_type is start.to:
                    end = exp.cast(end, to)
                else:
                    start = exp.cast(start, to)

            return self.func("SEQUENCE", start, end, step)

        def offset_limit_modifiers(
            self, expression: exp.Expression, fetch: bool, limit: t.Optional[exp.Fetch | exp.Limit]
        ) -> t.List[str]:
            return [
                self.sql(expression, "offset"),
                self.sql(limit),
            ]
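A minimal usage sketch (not part of this module): the dialect is normally exercised through the top-level sqlglot API, which routes parsing through Presto.Parser and rendering through Presto.Generator as defined above. The column and table names below are illustrative.

import sqlglot

# Parse Presto SQL into an expression tree; APPROX_DISTINCT with an accuracy
# argument maps to exp.ApproxDistinct via the FUNCTIONS table above and is
# rendered back by _approx_distinct_sql.
expression = sqlglot.parse_one("SELECT APPROX_DISTINCT(user_id, 0.01) FROM events", read="presto")

# Render the tree back as Presto SQL.
print(expression.sql(dialect="presto"))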
class Presto.Parser(sqlglot.parser.Parser):
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: Determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
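A minimal sketch of driving these pieces directly (the query text is illustrative): Presto.Tokenizer produces the token list and Presto.Parser turns it into an expression tree, with error_level overriding the default behaviour described above.

from sqlglot.dialects.presto import Presto
from sqlglot.errors import ErrorLevel

# Tokenize and parse a Presto query by hand; CARDINALITY is mapped to
# exp.ArraySize via the FUNCTIONS table above.
tokens = Presto.Tokenizer().tokenize("SELECT CARDINALITY(ARRAY[1, 2, 3])")
tree = Presto.Parser(error_level=ErrorLevel.RAISE).parse(tokens)[0]
print(tree.sql(dialect="presto"))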
class Presto.Generator(sqlglot.generator.Generator):
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether or not to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether or not to normalize identifiers to lowercase. Default: False.
- pad: Determines the pad size in a formatted string. Default: 2.
- indent: Determines the indentation size in a formatted string. Default: 2.
- normalize_functions: Whether or not to normalize all function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma: Determines whether or not the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
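A minimal sketch of the options above, instantiating Presto.Generator directly (pretty and identify are the generator options documented here; the query is illustrative).

from sqlglot import parse_one
from sqlglot.dialects.presto import Presto

# Build an expression tree from Presto SQL, then render it pretty-printed
# with identifiers always quoted.
tree = parse_one("SELECT a AS x, APPROX_DISTINCT(b) FROM t GROUP BY 1", read="presto")
print(Presto.Generator(pretty=True, identify=True).generate(tree))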