sqlglot.dialects.presto
from __future__ import annotations

import typing as t

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    date_trunc_to_time,
    format_time_lambda,
    if_sql,
    no_ilike_sql,
    no_safe_divide_sql,
    rename_func,
    struct_extract_sql,
    timestamptrunc_sql,
    timestrtotime_sql,
)
from sqlglot.dialects.mysql import MySQL
from sqlglot.errors import UnsupportedError
from sqlglot.helper import seq_get
from sqlglot.tokens import TokenType


def _approx_distinct_sql(self: generator.Generator, expression: exp.ApproxDistinct) -> str:
    accuracy = expression.args.get("accuracy")
    accuracy = ", " + self.sql(accuracy) if accuracy else ""
    return f"APPROX_DISTINCT({self.sql(expression, 'this')}{accuracy})"


def _datatype_sql(self: generator.Generator, expression: exp.DataType) -> str:
    sql = self.datatype_sql(expression)
    if expression.this == exp.DataType.Type.TIMESTAMPTZ:
        sql = f"{sql} WITH TIME ZONE"
    return sql


def _explode_to_unnest_sql(self: generator.Generator, expression: exp.Lateral) -> str:
    if isinstance(expression.this, (exp.Explode, exp.Posexplode)):
        return self.sql(
            exp.Join(
                this=exp.Unnest(
                    expressions=[expression.this.this],
                    alias=expression.args.get("alias"),
                    ordinality=isinstance(expression.this, exp.Posexplode),
                ),
                kind="cross",
            )
        )
    return self.lateral_sql(expression)


def _initcap_sql(self: generator.Generator, expression: exp.Initcap) -> str:
    regex = r"(\w)(\w*)"
    return f"REGEXP_REPLACE({self.sql(expression, 'this')}, '{regex}', x -> UPPER(x[1]) || LOWER(x[2]))"


def _decode_sql(self: generator.Generator, expression: exp.Decode) -> str:
    _ensure_utf8(expression.args["charset"])
    return self.func("FROM_UTF8", expression.this, expression.args.get("replace"))


def _encode_sql(self: generator.Generator, expression: exp.Encode) -> str:
    _ensure_utf8(expression.args["charset"])
    return f"TO_UTF8({self.sql(expression, 'this')})"


def _no_sort_array(self: generator.Generator, expression: exp.SortArray) -> str:
    if expression.args.get("asc") == exp.false():
        comparator = "(a, b) -> CASE WHEN a < b THEN 1 WHEN a > b THEN -1 ELSE 0 END"
    else:
        comparator = None
    return self.func("ARRAY_SORT", expression.this, comparator)


def _schema_sql(self: generator.Generator, expression: exp.Schema) -> str:
    if isinstance(expression.parent, exp.Property):
        columns = ", ".join(f"'{c.name}'" for c in expression.expressions)
        return f"ARRAY[{columns}]"

    if expression.parent:
        for schema in expression.parent.find_all(exp.Schema):
            if isinstance(schema.parent, exp.Property):
                expression = expression.copy()
                expression.expressions.extend(schema.expressions)

    return self.schema_sql(expression)


def _quantile_sql(self: generator.Generator, expression: exp.Quantile) -> str:
    self.unsupported("Presto does not support exact quantiles")
    return f"APPROX_PERCENTILE({self.sql(expression, 'this')}, {self.sql(expression, 'quantile')})"


def _str_to_time_sql(
    self: generator.Generator, expression: exp.StrToDate | exp.StrToTime | exp.TsOrDsToDate
) -> str:
    return f"DATE_PARSE({self.sql(expression, 'this')}, {self.format_time(expression)})"


def _ts_or_ds_to_date_sql(self: generator.Generator, expression: exp.TsOrDsToDate) -> str:
    time_format = self.format_time(expression)
    if time_format and time_format not in (Presto.time_format, Presto.date_format):
        return f"CAST({_str_to_time_sql(self, expression)} AS DATE)"
    return f"CAST(SUBSTR(CAST({self.sql(expression, 'this')} AS VARCHAR), 1, 10) AS DATE)"


def _ts_or_ds_add_sql(self: generator.Generator, expression: exp.TsOrDsAdd) -> str:
    this = expression.this

    if not isinstance(this, exp.CurrentDate):
        this = self.func(
            "DATE_PARSE",
            self.func(
                "SUBSTR",
                this if this.is_string else exp.cast(this, "VARCHAR"),
                exp.Literal.number(1),
                exp.Literal.number(10),
            ),
            Presto.date_format,
        )

    return self.func(
        "DATE_ADD",
        exp.Literal.string(expression.text("unit") or "day"),
        expression.expression,
        this,
    )


def _ensure_utf8(charset: exp.Literal) -> None:
    if charset.name.lower() != "utf-8":
        raise UnsupportedError(f"Unsupported charset {charset}")


def _approx_percentile(args: t.Sequence) -> exp.Expression:
    if len(args) == 4:
        return exp.ApproxQuantile(
            this=seq_get(args, 0),
            weight=seq_get(args, 1),
            quantile=seq_get(args, 2),
            accuracy=seq_get(args, 3),
        )
    if len(args) == 3:
        return exp.ApproxQuantile(
            this=seq_get(args, 0),
            quantile=seq_get(args, 1),
            accuracy=seq_get(args, 2),
        )
    return exp.ApproxQuantile.from_arg_list(args)


def _from_unixtime(args: t.Sequence) -> exp.Expression:
    if len(args) == 3:
        return exp.UnixToTime(
            this=seq_get(args, 0),
            hours=seq_get(args, 1),
            minutes=seq_get(args, 2),
        )
    if len(args) == 2:
        return exp.UnixToTime(
            this=seq_get(args, 0),
            zone=seq_get(args, 1),
        )
    return exp.UnixToTime.from_arg_list(args)


def _unnest_sequence(expression: exp.Expression) -> exp.Expression:
    if isinstance(expression, exp.Table):
        if isinstance(expression.this, exp.GenerateSeries):
            unnest = exp.Unnest(expressions=[expression.this])

            if expression.alias:
                return exp.alias_(
                    unnest,
                    alias="_u",
                    table=[expression.alias],
                    copy=False,
                )
            return unnest
    return expression


class Presto(Dialect):
    index_offset = 1
    null_ordering = "nulls_are_last"
    time_format = MySQL.time_format  # type: ignore
    time_mapping = MySQL.time_mapping  # type: ignore

    class Tokenizer(tokens.Tokenizer):
        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "START": TokenType.BEGIN,
            "MATCH_RECOGNIZE": TokenType.MATCH_RECOGNIZE,
            "ROW": TokenType.STRUCT,
        }

    class Parser(parser.Parser):
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "APPROX_DISTINCT": exp.ApproxDistinct.from_arg_list,
            "APPROX_PERCENTILE": _approx_percentile,
            "CARDINALITY": exp.ArraySize.from_arg_list,
            "CONTAINS": exp.ArrayContains.from_arg_list,
            "DATE_ADD": lambda args: exp.DateAdd(
                this=seq_get(args, 2),
                expression=seq_get(args, 1),
                unit=seq_get(args, 0),
            ),
            "DATE_DIFF": lambda args: exp.DateDiff(
                this=seq_get(args, 2),
                expression=seq_get(args, 1),
                unit=seq_get(args, 0),
            ),
            "DATE_FORMAT": format_time_lambda(exp.TimeToStr, "presto"),
            "DATE_PARSE": format_time_lambda(exp.StrToTime, "presto"),
            "DATE_TRUNC": date_trunc_to_time,
            "FROM_HEX": exp.Unhex.from_arg_list,
            "FROM_UNIXTIME": _from_unixtime,
            "FROM_UTF8": lambda args: exp.Decode(
                this=seq_get(args, 0), replace=seq_get(args, 1), charset=exp.Literal.string("utf-8")
            ),
            "NOW": exp.CurrentTimestamp.from_arg_list,
            "SEQUENCE": exp.GenerateSeries.from_arg_list,
            "STRPOS": lambda args: exp.StrPosition(
                this=seq_get(args, 0),
                substr=seq_get(args, 1),
                instance=seq_get(args, 2),
            ),
            "TO_UNIXTIME": exp.TimeToUnix.from_arg_list,
            "TO_HEX": exp.Hex.from_arg_list,
            "TO_UTF8": lambda args: exp.Encode(
                this=seq_get(args, 0), charset=exp.Literal.string("utf-8")
            ),
        }
        FUNCTION_PARSERS = parser.Parser.FUNCTION_PARSERS.copy()
        FUNCTION_PARSERS.pop("TRIM")

    class Generator(generator.Generator):
        INTERVAL_ALLOWS_PLURAL_FORM = False
        JOIN_HINTS = False
        TABLE_HINTS = False
        STRUCT_DELIMITER = ("(", ")")

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.LocationProperty: exp.Properties.Location.UNSUPPORTED,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.FLOAT: "REAL",
            exp.DataType.Type.BINARY: "VARBINARY",
            exp.DataType.Type.TEXT: "VARCHAR",
            exp.DataType.Type.TIMESTAMPTZ: "TIMESTAMP",
            exp.DataType.Type.STRUCT: "ROW",
        }

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.ApproxDistinct: _approx_distinct_sql,
            exp.Array: lambda self, e: f"ARRAY[{self.expressions(e, flat=True)}]",
            exp.ArrayConcat: rename_func("CONCAT"),
            exp.ArrayContains: rename_func("CONTAINS"),
            exp.ArraySize: rename_func("CARDINALITY"),
            exp.BitwiseAnd: lambda self, e: f"BITWISE_AND({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseLeftShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_LEFT({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseNot: lambda self, e: f"BITWISE_NOT({self.sql(e, 'this')})",
            exp.BitwiseOr: lambda self, e: f"BITWISE_OR({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseRightShift: lambda self, e: f"BITWISE_ARITHMETIC_SHIFT_RIGHT({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.BitwiseXor: lambda self, e: f"BITWISE_XOR({self.sql(e, 'this')}, {self.sql(e, 'expression')})",
            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
            exp.DataType: _datatype_sql,
            exp.DateAdd: lambda self, e: self.func(
                "DATE_ADD", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this
            ),
            exp.DateDiff: lambda self, e: self.func(
                "DATE_DIFF", exp.Literal.string(e.text("unit") or "day"), e.expression, e.this
            ),
            exp.DateStrToDate: lambda self, e: f"CAST(DATE_PARSE({self.sql(e, 'this')}, {Presto.date_format}) AS DATE)",
            exp.DateToDi: lambda self, e: f"CAST(DATE_FORMAT({self.sql(e, 'this')}, {Presto.dateint_format}) AS INT)",
            exp.Decode: _decode_sql,
            exp.DiToDate: lambda self, e: f"CAST(DATE_PARSE(CAST({self.sql(e, 'this')} AS VARCHAR), {Presto.dateint_format}) AS DATE)",
            exp.Encode: _encode_sql,
            exp.Group: transforms.preprocess([transforms.unalias_group]),
            exp.Hex: rename_func("TO_HEX"),
            exp.If: if_sql,
            exp.ILike: no_ilike_sql,
            exp.Initcap: _initcap_sql,
            exp.Lateral: _explode_to_unnest_sql,
            exp.Levenshtein: rename_func("LEVENSHTEIN_DISTANCE"),
            exp.LogicalOr: rename_func("BOOL_OR"),
            exp.LogicalAnd: rename_func("BOOL_AND"),
            exp.Quantile: _quantile_sql,
            exp.ApproxQuantile: rename_func("APPROX_PERCENTILE"),
            exp.SafeDivide: no_safe_divide_sql,
            exp.Schema: _schema_sql,
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_qualify,
                    transforms.eliminate_distinct_on,
                    transforms.explode_to_unnest,
                ]
            ),
            exp.SortArray: _no_sort_array,
            exp.StrPosition: rename_func("STRPOS"),
            exp.StrToDate: lambda self, e: f"CAST({_str_to_time_sql(self, e)} AS DATE)",
            exp.StrToTime: _str_to_time_sql,
            exp.StrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {self.format_time(e)}))",
            exp.StructExtract: struct_extract_sql,
            exp.TableFormatProperty: lambda self, e: f"TABLE_FORMAT='{e.name.upper()}'",
            exp.FileFormatProperty: lambda self, e: f"FORMAT='{e.name.upper()}'",
            exp.Table: transforms.preprocess([_unnest_sequence]),
            exp.TimestampTrunc: timestamptrunc_sql,
            exp.TimeStrToDate: timestrtotime_sql,
            exp.TimeStrToTime: timestrtotime_sql,
            exp.TimeStrToUnix: lambda self, e: f"TO_UNIXTIME(DATE_PARSE({self.sql(e, 'this')}, {Presto.time_format}))",
            exp.TimeToStr: lambda self, e: f"DATE_FORMAT({self.sql(e, 'this')}, {self.format_time(e)})",
            exp.TimeToUnix: rename_func("TO_UNIXTIME"),
            exp.TsOrDiToDi: lambda self, e: f"CAST(SUBSTR(REPLACE(CAST({self.sql(e, 'this')} AS VARCHAR), '-', ''), 1, 8) AS INT)",
            exp.TsOrDsAdd: _ts_or_ds_add_sql,
            exp.TsOrDsToDate: _ts_or_ds_to_date_sql,
            exp.Unhex: rename_func("FROM_HEX"),
            exp.UnixToStr: lambda self, e: f"DATE_FORMAT(FROM_UNIXTIME({self.sql(e, 'this')}), {self.format_time(e)})",
            exp.UnixToTime: rename_func("FROM_UNIXTIME"),
            exp.UnixToTimeStr: lambda self, e: f"CAST(FROM_UNIXTIME({self.sql(e, 'this')}) AS VARCHAR)",
            exp.VariancePop: rename_func("VAR_POP"),
            exp.WithinGroup: transforms.preprocess(
                [transforms.remove_within_group_for_percentiles]
            ),
        }

        def interval_sql(self, expression: exp.Interval) -> str:
            unit = self.sql(expression, "unit")
            if expression.this and unit.lower().startswith("week"):
                return f"({expression.this.name} * INTERVAL '7' day)"
            return super().interval_sql(expression)

        def transaction_sql(self, expression: exp.Transaction) -> str:
            modes = expression.args.get("modes")
            modes = f" {', '.join(modes)}" if modes else ""
            return f"START TRANSACTION{modes}"

        def generateseries_sql(self, expression: exp.GenerateSeries) -> str:
            start = expression.args["start"]
            end = expression.args["end"]
            step = expression.args.get("step")

            if isinstance(start, exp.Cast):
                target_type = start.to
            elif isinstance(end, exp.Cast):
                target_type = end.to
            else:
                target_type = None

            if target_type and target_type.is_type(exp.DataType.Type.TIMESTAMP):
                to = target_type.copy()

                if target_type is start.to:
                    end = exp.Cast(this=end, to=to)
                else:
                    start = exp.Cast(this=start, to=to)

            return self.func("SEQUENCE", start, end, step)
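A minimal usage sketch (an illustration, not part of the module source above; it assumes only the public sqlglot entry points): the FUNCTIONS and TRANSFORMS tables are what map Presto SQL into canonical expressions on parse and back into Presto syntax on generation.

import sqlglot

# Presto's unit-first DATE_ADD('day', 1, x) is normalized on parse into
# DateAdd(this=x, expression=1, unit='day') and restored on generation.
sql = "SELECT DATE_ADD('day', 1, CAST(x AS DATE)) FROM t"
print(sqlglot.transpile(sql, read="presto", write="presto")[0])

# The same canonical AST can be written for another dialect; the target
# dialect's generator decides how DateAdd is spelled there.
print(sqlglot.transpile(sql, read="presto", write="hive")[0])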
class Presto(Dialect):
class Tokenizer(tokens.Tokenizer):
Inherited Members
class Parser(parser.Parser):
Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: the desired error level. Default: ErrorLevel.RAISE
- error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
- index_offset: Index offset for arrays, e.g. ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
- alias_post_tablesample: If the table alias comes after tablesample. Default: False
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
- null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
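A hedged sketch of how these parser settings surface for the Presto subclass (the query is illustrative; it relies only on sqlglot.parse_one and the FUNCTIONS table shown above):

import sqlglot
from sqlglot import exp

# STRPOS and APPROX_PERCENTILE are parsed into canonical expressions
# (StrPosition, ApproxQuantile) rather than kept as anonymous functions.
ast = sqlglot.parse_one(
    "SELECT STRPOS(name, 'a'), APPROX_PERCENTILE(x, 0.5) FROM t",
    read="presto",
)
assert ast.find(exp.StrPosition) is not None
assert ast.find(exp.ApproxQuantile) is not None

# index_offset=1 and null_ordering="nulls_are_last" are supplied by the
# Presto dialect class itself rather than passed as Parser arguments.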
Inherited Members
class Generator(generator.Generator):
Generator interprets the given syntax tree and produces a SQL string as an output.
Arguments:
- time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the output the target time format
- time_trie (trie): a trie of the time_mapping keys
- pretty (bool): if set to True the returned string will be formatted. Default: False.
- quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
- quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
- identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
- identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
- bit_start (str): specifies which starting character to use to delimit bit literals. Default: None.
- bit_end (str): specifies which ending character to use to delimit bit literals. Default: None.
- hex_start (str): specifies which starting character to use to delimit hex literals. Default: None.
- hex_end (str): specifies which ending character to use to delimit hex literals. Default: None.
- byte_start (str): specifies which starting character to use to delimit byte literals. Default: None.
- byte_end (str): specifies which ending character to use to delimit byte literals. Default: None.
- identify (bool | str): 'always': always quote; 'safe': quote identifiers if they contain no uppercase characters; True defaults to 'always'.
- normalize (bool): if set to True all identifiers will be lowercased
- string_escape (str): specifies a string escape character. Default: '.
- identifier_escape (str): specifies an identifier escape character. Default: ".
- pad (int): determines padding in a formatted string. Default: 2.
- indent (int): determines the size of indentation in a formatted string. Default: 4.
- unnest_column_only (bool): if true unnest table aliases are considered only as column aliases
- normalize_functions (str): normalize function names to "upper", "lower", or None. Default: "upper"
- alias_post_tablesample (bool): if the table alias comes after tablesample. Default: False
- unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
- null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
- max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma (bool): if the comma is leading or trailing in select statements. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
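A rough illustration of the Presto generator behavior described above (an assumption-laden sketch, not taken from the sqlglot test suite; the printed output shapes are approximate):

import sqlglot
from sqlglot import exp

# Exact quantiles are unsupported in Presto: at the default
# unsupported_level (WARN) the generator logs a warning and falls back
# to APPROX_PERCENTILE via _quantile_sql.
query = exp.select(
    exp.Quantile(this=exp.column("x"), quantile=exp.Literal.number(0.5))
).from_("t")
print(query.sql(dialect="presto"))
# roughly: SELECT APPROX_PERCENTILE(x, 0.5) FROM t

# ILIKE has no direct Presto form; no_ilike_sql rewrites it with LOWER().
print(sqlglot.transpile("SELECT * FROM t WHERE y ILIKE '%a%'", read="postgres", write="presto")[0])
# roughly: SELECT * FROM t WHERE LOWER(y) LIKE '%a%'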
def generateseries_sql(self, expression: exp.GenerateSeries) -> str:
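A hedged sketch of what this method produces together with the _unnest_sequence table transform shown in the module source (the input query and the exact output text are illustrative; if one endpoint is cast to TIMESTAMP, the method also adds a matching CAST to the other endpoint):

import sqlglot

# A table-valued GENERATE_SERIES read from PostgreSQL is rewritten for
# Presto as UNNEST(SEQUENCE(...)).
print(sqlglot.transpile("SELECT * FROM GENERATE_SERIES(1, 5)", read="postgres", write="presto")[0])
# expected shape: SELECT * FROM UNNEST(SEQUENCE(1, 5))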
Inherited Members
- sqlglot.generator.Generator
- Generator
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- create_sql
- clone_sql
- describe_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- datatypesize_sql
- datatype_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- inputoutputformat_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- afterjournalproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- pseudotype_sql
- onconflict_sql
- returning_sql
- rowformatdelimitedproperty_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- setitem_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- after_having_modifiers
- after_limit_modifiers
- select_sql
- schema_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- concat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- unique_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonobject_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- attimezone_sql
- add_sql
- and_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- commit_sql
- rollback_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- is_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- trycast_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql