sqlglot.dialects.presto
from __future__ import annotations

import typing as t

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    date_trunc_to_time,
    format_time_lambda,
    if_sql,
    left_to_substring_sql,
    no_ilike_sql,
    no_pivot_sql,
    no_safe_divide_sql,
    rename_func,
    right_to_substring_sql,
    struct_extract_sql,
    timestamptrunc_sql,
    timestrtotime_sql,
)
from sqlglot.dialects.mysql import MySQL
from sqlglot.errors import UnsupportedError
from sqlglot.helper import seq_get
from sqlglot.tokens import TokenType


def _approx_distinct_sql(self: generator.Generator, expression: exp.ApproxDistinct) -> str:
    accuracy = expression.args.get("accuracy")
    accuracy = ", " + self.sql(accuracy) if accuracy else ""
    return f"APPROX_DISTINCT({self.sql(expression, 'this')}{accuracy})"


def _datatype_sql(self: generator.Generator, expression: exp.DataType) -> str:
    sql = self.datatype_sql(expression)
    if expression.is_type("timestamptz"):
        sql = f"{sql} WITH TIME ZONE"
    return sql


def _explode_to_unnest_sql(self: generator.Generator, expression: exp.Lateral) -> str:
    if isinstance(expression.this, (exp.Explode, exp.Posexplode)):
        return self.sql(
            exp.Join(
                this=exp.Unnest(
                    expressions=[expression.this.this],
                    alias=expression.args.get("alias"),
                    ordinality=isinstance(expression.this, exp.Posexplode),
                ),
                kind="cross",
            )
        )
    return self.lateral_sql(expression)


def _initcap_sql(self: generator.Generator, expression: exp.Initcap) -> str:
    regex = r"(\w)(\w*)"
    return f"REGEXP_REPLACE({self.sql(expression, 'this')}, '{regex}', x -> UPPER(x[1]) || LOWER(x[2]))"


def _decode_sql(self: generator.Generator, expression: exp.Decode) -> str:
    _ensure_utf8(expression.args["charset"])
    return self.func("FROM_UTF8", expression.this, expression.args.get("replace"))


def _encode_sql(self: generator.Generator, expression: exp.Encode) -> str:
    _ensure_utf8(expression.args["charset"])
    return f"TO_UTF8({self.sql(expression, 'this')})"


def _no_sort_array(self: generator.Generator, expression: exp.SortArray) -> str:
    if expression.args.get("asc") == exp.false():
        comparator = "(a, b) -> CASE WHEN a < b THEN 1 WHEN a > b THEN -1 ELSE 0 END"
    else:
        comparator = None
    return self.func("ARRAY_SORT", expression.this, comparator)


def _schema_sql(self: generator.Generator, expression: exp.Schema) -> str:
    if isinstance(expression.parent, exp.Property):
        columns = ", ".join(f"'{c.name}'" for c in expression.expressions)
        return f"ARRAY[{columns}]"

    if expression.parent:
        for schema in expression.parent.find_all(exp.Schema):
            if isinstance(schema.parent, exp.Property):
                expression = expression.copy()
                expression.expressions.extend(schema.expressions)

    return self.schema_sql(expression)


def _quantile_sql(self: generator.Generator, expression: exp.Quantile) -> str:
    self.unsupported("Presto does not support exact quantiles")
    return f"APPROX_PERCENTILE({self.sql(expression, 'this')}, {self.sql(expression, 'quantile')})"


def _str_to_time_sql(
    self: generator.Generator, expression: exp.StrToDate | exp.StrToTime | exp.TsOrDsToDate
) -> str:
    return f"DATE_PARSE({self.sql(expression, 'this')}, {self.format_time(expression)})"


def _ts_or_ds_to_date_sql(self: generator.Generator, expression: exp.TsOrDsToDate) -> str:
    time_format = self.format_time(expression)
    if time_format and time_format not in (Presto.time_format, Presto.date_format):
        return f"CAST({_str_to_time_sql(self, expression)} AS DATE)"
    return f"CAST(SUBSTR(CAST({self.sql(expression, 'this')} AS VARCHAR), 1, 10) AS DATE)"


def _ts_or_ds_add_sql(self: generator.Generator, expression: exp.TsOrDsAdd) -> str:
    this = expression.this

    if not isinstance(this, exp.CurrentDate):
        this = self.func(
            "DATE_PARSE",
            self.func(
                "SUBSTR",
                this if this.is_string else exp.cast(this, "VARCHAR"),
                exp.Literal.number(1),
                exp.Literal.number(10),
            ),
            Presto.date_format,
        )

    return self.func(
        "DATE_ADD",
        exp.Literal.string(expression.text("unit") or "day"),
        expression.expression,
        this,
    )


def _ensure_utf8(charset: exp.Literal) -> None:
    if charset.name.lower() != "utf-8":
        raise UnsupportedError(f"Unsupported charset {charset}")


def _approx_percentile(args: t.List) -> exp.Expression:
    if len(args) == 4:
        return exp.ApproxQuantile(
            this=seq_get(args, 0),
            weight=seq_get(args, 1),
            quantile=seq_get(args, 2),
            accuracy=seq_get(args, 3),
        )
    if len(args) == 3:
        return exp.ApproxQuantile(
            this=seq_get(args, 0),
            quantile=seq_get(args, 1),
            accuracy=seq_get(args, 2),
        )
    return exp.ApproxQuantile.from_arg_list(args)


def _from_unixtime(args: t.List) -> exp.Expression:
    if len(args) == 3:
        return exp.UnixToTime(
            this=seq_get(args, 0),
            hours=seq_get(args, 1),
            minutes=seq_get(args, 2),
        )
    if len(args) == 2:
        return exp.UnixToTime(
            this=seq_get(args, 0),
            zone=seq_get(args, 1),
        )
    return exp.UnixToTime.from_arg_list(args)


def _unnest_sequence(expression: exp.Expression) -> exp.Expression:
    if isinstance(expression, exp.Table):
        if isinstance(expression.this, exp.GenerateSeries):
            unnest = exp.Unnest(expressions=[expression.this])

            if expression.alias:
                return exp.alias_(
                    unnest,
                    alias="_u",
                    table=[expression.alias],
                    copy=False,
                )
            return unnest
    return expression


class Presto(Dialect):
    index_offset = 1
    null_ordering = "nulls_are_last"
    time_format = MySQL.time_format
    time_mapping = MySQL.time_mapping

    class Tokenizer(tokens.Tokenizer):
        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "START": TokenType.BEGIN,
            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
            "ROW": TokenType.STRUCT,
        }

    class Parser(parser.Parser):
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            "APPROX_DISTINCT": exp.ApproxDistinct.from_arg_list,
            "APPROX_PERCENTILE": _approx_percentile,
            "CARDINALITY": exp.ArraySize.from_arg_list,
            "CONTAINS": exp.ArrayContains.from_arg_list,
            "DATE_ADD": lambda args: exp.DateAdd(
                this=seq_get(args, 2),
                expression=seq_get(args, 1),
                unit=seq_get(args, 0),
            ),
            "DATE_DIFF": lambda args: exp.DateDiff(
                this=seq_get(args, 2),
                expression=seq_get(args, 1),
                unit=seq_get(args, 0),
            ),
            "DATE_FORMAT": format_time_lambda(exp.TimeToStr, "presto"),
            "DATE_PARSE": format_time_lambda(exp.StrToTime, "presto"),
            "DATE_TRUNC": date_trunc_to_time,
            "FROM_HEX": exp.Unhex.from_arg_list,
            "FROM_UNIXTIME": _from_unixtime,
            "FROM_UTF8": lambda args: exp.Decode(
                this=seq_get(args, 0), replace=seq_get(args, 1), charset=exp.Literal.string("utf-8")
            ),
            "NOW": exp.CurrentTimestamp.from_arg_list,
            "SEQUENCE": exp.GenerateSeries.from_arg_list,
            "STRPOS": lambda args: exp.StrPosition(
                this=seq_get(args, 0),
                substr=seq_get(args, 1),
                instance=seq_get(args, 2),
            ),
            "TO_UNIXTIME": exp.TimeToUnix.from_arg_list,
            "TO_HEX": exp.Hex.from_arg_list,
            "TO_UTF8": lambda args: exp.Encode(
                this=seq_get(args, 0), charset=exp.Literal.string("utf-8")
            ),
        }
        FUNCTION_PARSERS = parser.Parser.FUNCTION_PARSERS.copy()
        FUNCTION_PARSERS.pop("TRIM")

    class Generator(generator.Generator):
        INTERVAL_ALLOWS_PLURAL_FORM = False
        JOIN_HINTS = False
        TABLE_HINTS = False
        IS_BOOL = False
        STRUCT_DELIMITER = ("(", ")")

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.LocationProperty: exp.Properties.Location.UNSUPPORTED,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.FLOAT: "REAL",
            exp.DataType.Type.BINARY: "VARBINARY",
            exp.DataType.Type.TEXT: "VARCHAR",
            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
            exp.DataType.Type.STRUCT: "ROW",
        }

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.ApproxDistinct: _approx_distinct_sql,
            exp.ApproxQuantile: rename_func("APPROX_PERCENTILE"),
            exp.Array: lambda self, e: f"ARRAY[{self.expressions(e, flat=True)}]",
            exp.ArrayConcat: rename_func("CONCAT"),
            exp.ArrayContains: rename_func("CONTAINS"),
            exp.ArraySize: rename_func("CARDINALITY"),
            exp.BitwiseAnd: lambda self, e: f"BITWISE_AND({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseLeftShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_LEFT({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseNot: lambda self, e: f"BITWISE_NOT({self.sql(e, 'this')})",
            exp.BitwiseOr: lambda self, e: f"BITWISE_OR({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseRightShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_RIGHT({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseXor: lambda self, e: f"BITWISE_XOR({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.Cast: transforms.preprocess([transforms.epoch_cast_to_ts]),
            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
            exp.DataType: _datatype_sql,
            exp.DateAdd: lambda self, e: self.func(
                "DATE_ADD", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this
            ),
            exp.DateDiff: lambda self, e: self.func(
                "DATE_DIFF", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this
            ),
            exp.DateStrToDate: lambda self, e: f"CAST(DATE_PARSE({self.sql(e, 'this')}, {Presto.date_format}) AS DATE)",
            exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Presto.dateint_format}) AS INT)",
            exp.Decode: _decode_sql,
            exp.DiToDate: lambda self, e: f"CAST(DATE_PARSE(CAST({self.sql(e, 'this')} AS VARCHAR), {Presto.dateint_format}) AS DATE)",
            exp.Encode: _encode_sql,
            exp.FileFormatProperty: lambda self, e: f"FORMAT='{e.name.upper()}'",
            exp.Group: transforms.preprocess([transforms.unalias_group]),
            exp.Hex: rename_func("TO_HEX"),
            exp.If: if_sql,
            exp.ILike: no_ilike_sql,
            exp.Initcap: _initcap_sql,
            exp.Lateral: _explode_to_unnest_sql,
            exp.Left: left_to_substring_sql,
            exp.Levenshtein: rename_func("LEVENSHTEIN_DISTANCE"),
            exp.LogicalAnd: rename_func("BOOL_AND"),
            exp.LogicalOr: rename_func("BOOL_OR"),
            exp.Pivot: no_pivot_sql,
            exp.Quantile: _quantile_sql,
            exp.Right: right_to_substring_sql,
            exp.SafeDivide: no_safe_divide_sql,
            exp.Schema: _schema_sql,
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_qualify,
                    transforms.eliminate_distinct_on,
                    transforms.explode_to_unnest,
                ]
            ),
            exp.SortArray: _no_sort_array,
            exp.StrPosition: rename_func("STRPOS"),
            exp.StrToDate: lambda self, e: f"CAST({_str_to_time_sql(self, e)} AS DATE)",
            exp.StrToTime: _str_to_time_sql,
            exp.StrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {self.format_time(e)}))",
            exp.StructExtract: struct_extract_sql,
            exp.Table: transforms.preprocess([_unnest_sequence]),
            exp.TimestampTrunc: timestamptrunc_sql,
            exp.TimeStrToDate: timestrtotime_sql,
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeStrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {Presto.time_format}))",
            exp.TimeToStr: lambda self, e: f"DATE_FORMAT({self.sql(e, 'this')}, {self.format_time(e)})",
            exp.TimeToUnix: rename_func("TO_UNIXTIME"),
            exp.TryCast: transforms.preprocess([transforms.epoch_cast_to_ts]),
            exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS VARCHAR), '-', ''), 1, 8) AS INT)",
            exp.TsOrDsAdd: _ts_or_ds_add_sql,
            exp.TsOrDsToDate: _ts_or_ds_to_date_sql,
            exp.Unhex: rename_func("FROM_HEX"),
            exp.UnixToStr: lambda self, e: f"DATE_FORMAT(FROM_UNIXTIME({self.sql(e, 'this')}), {self.format_time(e)})",
            exp.UnixToTime: rename_func("FROM_UNIXTIME"),
            exp.UnixToTimeStr: lambda self, e: f"CAST(FROM_UNIXTIME({self.sql(e, 'this')}) AS VARCHAR)",
            exp.VariancePop: rename_func("VAR_POP"),
            exp.With: transforms.preprocess([transforms.add_recursive_cte_column_names]),
            exp.WithinGroup: transforms.preprocess(
                [transforms.remove_within_group_for_percentiles]
            ),
        }

        def interval_sql(self, expression: exp.Interval) -> str:
            unit = self.sql(expression, "unit")
            if expression.this and unit.lower().startswith("week"):
                return f"({expression.this.name} * INTERVAL '7' day)"
            return super().interval_sql(expression)

        def transaction_sql(self, expression: exp.Transaction) -> str:
            modes = expression.args.get("modes")
            modes = f" {', '.join(modes)}" if modes else ""
            return f"START TRANSACTION{modes}"

        def generateseries_sql(self, expression: exp.GenerateSeries) -> str:
            start = expression.args["start"]
            end = expression.args["end"]
            step = expression.args.get("step")

            if isinstance(start, exp.Cast):
                target_type = start.to
            elif isinstance(end, exp.Cast):
                target_type = end.to
            else:
                target_type = None

            if target_type and target_type.is_type("timestamp"):
                to = target_type.copy()

                if target_type is start.to:
                    end = exp.Cast(this=end, to=to)
                else:
                    start = exp.Cast(this=start, to=to)

            return self.func("SEQUENCE", start, end, step)
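A minimal usage sketch (hypothetical table and column names; output is indicative and may vary between sqlglot versions): passing write="presto" routes generation through the dialect defined above, so base function names are rewritten by its TRANSFORMS table.

import sqlglot

# ARRAY_SIZE and LEVENSHTEIN are renamed by the Presto generator defined above.
sql = "SELECT ARRAY_SIZE(tags), LEVENSHTEIN(a, b) FROM t"
print(sqlglot.transpile(sql, write="presto")[0])
# e.g. SELECT CARDINALITY(tags), LEVENSHTEIN_DISTANCE(a, b) FROM t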
class Tokenizer(tokens.Tokenizer):
    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "START": TokenType.BEGIN,
        "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
        "ROW": TokenType.STRUCT,
    }
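A small sketch of the keyword overrides above, assuming the public Tokenizer.tokenize API: START is tokenized as a BEGIN token, which is what lets transaction_sql emit START TRANSACTION.

from sqlglot.dialects.presto import Presto
from sqlglot.tokens import TokenType

# "START" maps to TokenType.BEGIN per the KEYWORDS override above.
first_token = Presto.Tokenizer().tokenize("START TRANSACTION")[0]
assert first_token.token_type == TokenType.BEGIN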
Presto.Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer and produces a parsed syntax tree. A short parsing example follows the argument list below.
Arguments:
- error_level: the desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
- index_offset: Index offset for arrays, e.g. whether ARRAY[0] or ARRAY[1] is the head of a list. Default: 0
- alias_post_tablesample: Whether the table alias comes after TABLESAMPLE. Default: False
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
- null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
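A minimal parsing sketch (hypothetical column and table names): Presto-specific function names registered in Presto.Parser.FUNCTIONS (see the module source above) are mapped onto sqlglot expression nodes.

import sqlglot
from sqlglot import exp

# APPROX_PERCENTILE(latency, 0.95) parses to an exp.ApproxQuantile node.
tree = sqlglot.parse_one("SELECT APPROX_PERCENTILE(latency, 0.95) FROM requests", read="presto")
assert tree.find(exp.ApproxQuantile) is not None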
Presto.Generator interprets the given syntax tree and produces a SQL string as output. A short generation example follows the argument list below.
Arguments:
- time_mapping (dict): the dictionary of custom time mappings in which the key represents a Python time format and the value represents the target time format
- time_trie (trie): a trie of the time_mapping keys
- pretty (bool): if set to True the returned string will be formatted. Default: False.
- quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
- quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
- identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
- identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
- bit_start (str): specifies which starting character to use to delimit bit literals. Default: None.
- bit_end (str): specifies which ending character to use to delimit bit literals. Default: None.
- hex_start (str): specifies which starting character to use to delimit hex literals. Default: None.
- hex_end (str): specifies which ending character to use to delimit hex literals. Default: None.
- byte_start (str): specifies which starting character to use to delimit byte literals. Default: None.
- byte_end (str): specifies which ending character to use to delimit byte literals. Default: None.
- raw_start (str): specifies which starting character to use to delimit raw literals. Default: None.
- raw_end (str): specifies which ending character to use to delimit raw literals. Default: None.
- identify (bool | str): 'always': always quote identifiers; 'safe': quote identifiers only if they don't contain an uppercase character; True is equivalent to 'always'.
- normalize (bool): if set to True, all identifiers will be lowercased
- string_escape (str): specifies a string escape character. Default: '.
- identifier_escape (str): specifies an identifier escape character. Default: ".
- pad (int): determines padding in a formatted string. Default: 2.
- indent (int): determines the size of indentation in a formatted string. Default: 4.
- unnest_column_only (bool): if True, UNNEST table aliases are treated only as column aliases
- normalize_functions (str): normalize function names to "upper", "lower", or None (no normalization). Default: "upper"
- alias_post_tablesample (bool): whether the table alias comes after TABLESAMPLE. Default: False
- identifiers_can_start_with_digit (bool): whether an unquoted identifier can start with a digit. Default: False
- unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
- max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma (bool): whether the comma is leading or trailing in SELECT statements. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
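A minimal generation sketch: parse in the default dialect, then emit Presto SQL through this Generator; TYPE_MAPPING rewrites TEXT to VARCHAR (output is indicative).

import sqlglot

# TEXT is mapped to VARCHAR by Presto.Generator.TYPE_MAPPING.
tree = sqlglot.parse_one("CREATE TABLE t (c TEXT)")
print(tree.sql(dialect="presto"))
# e.g. CREATE TABLE t (c VARCHAR)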
def generateseries_sql(self, expression: exp.GenerateSeries) -> str:
    start = expression.args["start"]
    end = expression.args["end"]
    step = expression.args.get("step")

    if isinstance(start, exp.Cast):
        target_type = start.to
    elif isinstance(end, exp.Cast):
        target_type = end.to
    else:
        target_type = None

    if target_type and target_type.is_type("timestamp"):
        to = target_type.copy()

        if target_type is start.to:
            end = exp.Cast(this=end, to=to)
        else:
            start = exp.Cast(this=start, to=to)

    return self.func("SEQUENCE", start, end, step)
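A sketch of how GENERATE_SERIES is emitted for Presto (output is indicative): the _unnest_sequence transform in the module source wraps the call in UNNEST, and this method renders it as SEQUENCE; when only one endpoint is cast to TIMESTAMP, the other endpoint is cast to match.

import sqlglot

# GENERATE_SERIES used as a table becomes UNNEST(SEQUENCE(...)).
print(sqlglot.transpile("SELECT * FROM GENERATE_SERIES(0, 10, 2)", write="presto")[0])
# e.g. SELECT * FROM UNNEST(SEQUENCE(0, 10, 2))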
Inherited Members
- sqlglot.generator.Generator
- Generator
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- createable_sql
- create_sql
- clone_sql
- describe_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- rawstring_sql
- datatypesize_sql
- datatype_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- inputoutputformat_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- pseudotype_sql
- onconflict_sql
- returning_sql
- rowformatdelimitedproperty_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- setitem_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- after_having_modifiers
- after_limit_modifiers
- select_sql
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- concat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonobject_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- attimezone_sql
- add_sql
- and_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- commit_sql
- rollback_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- is_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- trycast_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- oncluster_sql