Edit on GitHub

sqlglot.dialects.mysql

  1from __future__ import annotations
  2
  3from sqlglot import exp, generator, parser, tokens, transforms
  4from sqlglot.dialects.dialect import (
  5    Dialect,
  6    arrow_json_extract_scalar_sql,
  7    datestrtodate_sql,
  8    format_time_lambda,
  9    locate_to_strposition,
 10    max_or_greatest,
 11    min_or_least,
 12    no_ilike_sql,
 13    no_paren_current_date_sql,
 14    no_tablesample_sql,
 15    no_trycast_sql,
 16    parse_date_delta_with_interval,
 17    rename_func,
 18    strposition_to_locate_sql,
 19)
 20from sqlglot.helper import seq_get
 21from sqlglot.tokens import TokenType
 22
 23
 24def _show_parser(*args, **kwargs):
 25    def _parse(self):
 26        return self._parse_show_mysql(*args, **kwargs)
 27
 28    return _parse
 29
 30
 31def _date_trunc_sql(self, expression):
 32    expr = self.sql(expression, "this")
 33    unit = expression.text("unit")
 34
 35    if unit == "day":
 36        return f"DATE({expr})"
 37
 38    if unit == "week":
 39        concat = f"CONCAT(YEAR({expr}), ' ', WEEK({expr}, 1), ' 1')"
 40        date_format = "%Y %u %w"
 41    elif unit == "month":
 42        concat = f"CONCAT(YEAR({expr}), ' ', MONTH({expr}), ' 1')"
 43        date_format = "%Y %c %e"
 44    elif unit == "quarter":
 45        concat = f"CONCAT(YEAR({expr}), ' ', QUARTER({expr}) * 3 - 2, ' 1')"
 46        date_format = "%Y %c %e"
 47    elif unit == "year":
 48        concat = f"CONCAT(YEAR({expr}), ' 1 1')"
 49        date_format = "%Y %c %e"
 50    else:
 51        self.unsupported(f"Unexpected interval unit: {unit}")
 52        return f"DATE({expr})"
 53
 54    return f"STR_TO_DATE({concat}, '{date_format}')"
 55
 56
 57def _str_to_date(args):
 58    date_format = MySQL.format_time(seq_get(args, 1))
 59    return exp.StrToDate(this=seq_get(args, 0), format=date_format)
 60
 61
 62def _str_to_date_sql(self, expression):
 63    date_format = self.format_time(expression)
 64    return f"STR_TO_DATE({self.sql(expression.this)}, {date_format})"
 65
 66
 67def _trim_sql(self, expression):
 68    target = self.sql(expression, "this")
 69    trim_type = self.sql(expression, "position")
 70    remove_chars = self.sql(expression, "expression")
 71
 72    # Use TRIM/LTRIM/RTRIM syntax if the expression isn't mysql-specific
 73    if not remove_chars:
 74        return self.trim_sql(expression)
 75
 76    trim_type = f"{trim_type} " if trim_type else ""
 77    remove_chars = f"{remove_chars} " if remove_chars else ""
 78    from_part = "FROM " if trim_type or remove_chars else ""
 79    return f"TRIM({trim_type}{remove_chars}{from_part}{target})"
 80
 81
 82def _date_add_sql(kind):
 83    def func(self, expression):
 84        this = self.sql(expression, "this")
 85        unit = expression.text("unit").upper() or "DAY"
 86        return (
 87            f"DATE_{kind}({this}, {self.sql(exp.Interval(this=expression.expression, unit=unit))})"
 88        )
 89
 90    return func
 91
 92
class MySQL(Dialect):
    """sqlglot dialect for MySQL: tokenizer, parser and generator tweaks."""

    # Default datetime literal format; %T expands to %H:%M:%S (see time_mapping).
    time_format = "'%Y-%m-%d %T'"

    # https://prestodb.io/docs/current/functions/datetime.html#mysql-date-functions
    # Maps MySQL strftime-style specifiers to their canonical equivalents.
    time_mapping = {
        "%M": "%B",
        "%c": "%-m",
        "%e": "%-d",
        "%h": "%I",
        "%i": "%M",
        "%s": "%S",
        "%S": "%S",
        "%u": "%W",
        "%k": "%-H",
        "%l": "%-I",
        "%T": "%H:%M:%S",
        "%W": "%a",
    }

    class Tokenizer(tokens.Tokenizer):
        QUOTES = ["'", '"']
        COMMENTS = ["--", "#", ("/*", "*/")]
        IDENTIFIERS = ["`"]
        STRING_ESCAPES = ["'", "\\"]
        BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CHARSET": TokenType.CHARACTER_SET,
            "LONGBLOB": TokenType.LONGBLOB,
            "LONGTEXT": TokenType.LONGTEXT,
            "MEDIUMBLOB": TokenType.MEDIUMBLOB,
            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
            "SEPARATOR": TokenType.SEPARATOR,
            "START": TokenType.BEGIN,
            # Character-set introducers (e.g. _utf8'abc') tokenize as INTRODUCER.
            "_ARMSCII8": TokenType.INTRODUCER,
            "_ASCII": TokenType.INTRODUCER,
            "_BIG5": TokenType.INTRODUCER,
            "_BINARY": TokenType.INTRODUCER,
            "_CP1250": TokenType.INTRODUCER,
            "_CP1251": TokenType.INTRODUCER,
            "_CP1256": TokenType.INTRODUCER,
            "_CP1257": TokenType.INTRODUCER,
            "_CP850": TokenType.INTRODUCER,
            "_CP852": TokenType.INTRODUCER,
            "_CP866": TokenType.INTRODUCER,
            "_CP932": TokenType.INTRODUCER,
            "_DEC8": TokenType.INTRODUCER,
            "_EUCJPMS": TokenType.INTRODUCER,
            "_EUCKR": TokenType.INTRODUCER,
            "_GB18030": TokenType.INTRODUCER,
            "_GB2312": TokenType.INTRODUCER,
            "_GBK": TokenType.INTRODUCER,
            "_GEOSTD8": TokenType.INTRODUCER,
            "_GREEK": TokenType.INTRODUCER,
            "_HEBREW": TokenType.INTRODUCER,
            "_HP8": TokenType.INTRODUCER,
            "_KEYBCS2": TokenType.INTRODUCER,
            "_KOI8R": TokenType.INTRODUCER,
            "_KOI8U": TokenType.INTRODUCER,
            "_LATIN1": TokenType.INTRODUCER,
            "_LATIN2": TokenType.INTRODUCER,
            "_LATIN5": TokenType.INTRODUCER,
            "_LATIN7": TokenType.INTRODUCER,
            "_MACCE": TokenType.INTRODUCER,
            "_MACROMAN": TokenType.INTRODUCER,
            "_SJIS": TokenType.INTRODUCER,
            "_SWE7": TokenType.INTRODUCER,
            "_TIS620": TokenType.INTRODUCER,
            "_UCS2": TokenType.INTRODUCER,
            "_UJIS": TokenType.INTRODUCER,
            # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
            "_UTF8": TokenType.INTRODUCER,
            "_UTF16": TokenType.INTRODUCER,
            "_UTF16LE": TokenType.INTRODUCER,
            "_UTF32": TokenType.INTRODUCER,
            "_UTF8MB3": TokenType.INTRODUCER,
            "_UTF8MB4": TokenType.INTRODUCER,
            "@@": TokenType.SESSION_PARAMETER,
        }

        # SHOW is removed from single-token commands so the Parser below can
        # handle its full grammar (see STATEMENT_PARSERS / SHOW_PARSERS).
        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SHOW}

    class Parser(parser.Parser):
        FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA, TokenType.DATABASE}  # type: ignore

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
            "DATE_FORMAT": format_time_lambda(exp.TimeToStr, "mysql"),
            "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
            "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
            # LEFT(s, n) is modeled as SUBSTRING(s, 1, n).
            "LEFT": lambda args: exp.Substring(
                this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
            ),
            "LOCATE": locate_to_strposition,
            "STR_TO_DATE": _str_to_date,
        }

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
            # GROUP_CONCAT(expr [SEPARATOR sep]) — the separator is optional.
            "GROUP_CONCAT": lambda self: self.expression(
                exp.GroupConcat,
                this=self._parse_lambda(),
                separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
            ),
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,  # type: ignore
            TokenType.SHOW: lambda self: self._parse_show(),
        }

        # Dispatch table for the many SHOW variants. Aliases (MASTER LOGS,
        # CHARSET, SLAVE HOSTS, ...) map onto their canonical variant name.
        SHOW_PARSERS = {
            "BINARY LOGS": _show_parser("BINARY LOGS"),
            "MASTER LOGS": _show_parser("BINARY LOGS"),
            "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
            "CHARACTER SET": _show_parser("CHARACTER SET"),
            "CHARSET": _show_parser("CHARACTER SET"),
            "COLLATION": _show_parser("COLLATION"),
            "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
            "COLUMNS": _show_parser("COLUMNS", target="FROM"),
            "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
            "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
            "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
            "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
            "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
            "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
            "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
            "DATABASES": _show_parser("DATABASES"),
            "ENGINE": _show_parser("ENGINE", target=True),
            "STORAGE ENGINES": _show_parser("ENGINES"),
            "ENGINES": _show_parser("ENGINES"),
            "ERRORS": _show_parser("ERRORS"),
            "EVENTS": _show_parser("EVENTS"),
            "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
            "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
            "GRANTS": _show_parser("GRANTS", target="FOR"),
            "INDEX": _show_parser("INDEX", target="FROM"),
            "MASTER STATUS": _show_parser("MASTER STATUS"),
            "OPEN TABLES": _show_parser("OPEN TABLES"),
            "PLUGINS": _show_parser("PLUGINS"),
            "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
            "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
            "PRIVILEGES": _show_parser("PRIVILEGES"),
            "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
            "PROCESSLIST": _show_parser("PROCESSLIST"),
            "PROFILE": _show_parser("PROFILE"),
            "PROFILES": _show_parser("PROFILES"),
            "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
            "REPLICAS": _show_parser("REPLICAS"),
            "SLAVE HOSTS": _show_parser("REPLICAS"),
            "REPLICA STATUS": _show_parser("REPLICA STATUS"),
            "SLAVE STATUS": _show_parser("REPLICA STATUS"),
            "GLOBAL STATUS": _show_parser("STATUS", global_=True),
            "SESSION STATUS": _show_parser("STATUS"),
            "STATUS": _show_parser("STATUS"),
            "TABLE STATUS": _show_parser("TABLE STATUS"),
            "FULL TABLES": _show_parser("TABLES", full=True),
            "TABLES": _show_parser("TABLES"),
            "TRIGGERS": _show_parser("TRIGGERS"),
            "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
            "SESSION VARIABLES": _show_parser("VARIABLES"),
            "VARIABLES": _show_parser("VARIABLES"),
            "WARNINGS": _show_parser("WARNINGS"),
        }

        SET_PARSERS = {
            **parser.Parser.SET_PARSERS,
            "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
            "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
            "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "NAMES": lambda self: self._parse_set_item_names(),
        }

        # Valid type options for SHOW PROFILE (used by _parse_show_mysql).
        PROFILE_TYPES = {
            "ALL",
            "BLOCK IO",
            "CONTEXT SWITCHES",
            "CPU",
            "IPC",
            "MEMORY",
            "PAGE FAULTS",
            "SOURCE",
            "SWAPS",
        }

        LOG_DEFAULTS_TO_LN = True

        def _parse_show_mysql(self, this, target=False, full=None, global_=None):
            """Parse the remainder of a MySQL SHOW statement into an ``exp.Show``.

            ``this`` is the canonical SHOW variant name. ``target`` controls
            whether an object name follows: a string means that keyword is
            consumed first (e.g. "FROM", "FOR"); True means the name follows
            directly. ``full`` / ``global_`` carry the FULL / GLOBAL modifiers.
            """
            if target:
                if isinstance(target, str):
                    self._match_text_seq(target)
                target_id = self._parse_id_var()
            else:
                target_id = None

            log = self._parse_string() if self._match_text_seq("IN") else None

            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
                # For these variants FROM introduces a log position, not a db.
                position = self._parse_number() if self._match_text_seq("FROM") else None
                db = None
            else:
                position = None
                db = None

                if self._match(TokenType.FROM):
                    db = self._parse_id_var()
                elif self._match(TokenType.DOT):
                    # Qualified form "db.target": what we parsed was the db.
                    db = target_id
                    target_id = self._parse_id_var()

            channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None

            like = self._parse_string() if self._match_text_seq("LIKE") else None
            where = self._parse_where()

            if this == "PROFILE":
                types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
                query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
                offset = self._parse_number() if self._match_text_seq("OFFSET") else None
                limit = self._parse_number() if self._match_text_seq("LIMIT") else None
            else:
                types, query = None, None
                offset, limit = self._parse_oldstyle_limit()

            # MUTEX / STATUS suffix (SHOW ENGINE ... {MUTEX|STATUS}).
            mutex = True if self._match_text_seq("MUTEX") else None
            mutex = False if self._match_text_seq("STATUS") else mutex

            return self.expression(
                exp.Show,
                this=this,
                target=target_id,
                full=full,
                log=log,
                position=position,
                db=db,
                channel=channel,
                like=like,
                where=where,
                types=types,
                query=query,
                offset=offset,
                limit=limit,
                mutex=mutex,
                # "global" is a Python keyword, so it is passed via dict expansion.
                **{"global": global_},
            )

        def _parse_oldstyle_limit(self):
            """Parse ``LIMIT [offset,] count`` and return ``(offset, limit)``."""
            limit = None
            offset = None
            if self._match_text_seq("LIMIT"):
                parts = self._parse_csv(self._parse_number)
                if len(parts) == 1:
                    limit = parts[0]
                elif len(parts) == 2:
                    # Two-part form: the first number is the offset.
                    limit = parts[1]
                    offset = parts[0]
            return offset, limit

        def _parse_set_item_charset(self, kind):
            """Parse a SET CHARACTER SET / SET CHARSET item."""
            this = self._parse_string() or self._parse_id_var()

            return self.expression(
                exp.SetItem,
                this=this,
                kind=kind,
            )

        def _parse_set_item_names(self):
            """Parse ``SET NAMES charset [COLLATE collation]``."""
            charset = self._parse_string() or self._parse_id_var()
            if self._match_text_seq("COLLATE"):
                collate = self._parse_string() or self._parse_id_var()
            else:
                collate = None
            return self.expression(
                exp.SetItem,
                this=charset,
                collate=collate,
                kind="NAMES",
            )

    class Generator(generator.Generator):
        LOCKING_READS_SUPPORTED = True
        NULL_ORDERING_SUPPORTED = False
        JOIN_HINTS = False
        TABLE_HINTS = False

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.CurrentDate: no_paren_current_date_sql,
            exp.DateDiff: lambda self, e: self.func("DATEDIFF", e.this, e.expression),
            exp.DateAdd: _date_add_sql("ADD"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.DateSub: _date_add_sql("SUB"),
            exp.DateTrunc: _date_trunc_sql,
            exp.DayOfMonth: rename_func("DAYOFMONTH"),
            exp.DayOfWeek: rename_func("DAYOFWEEK"),
            exp.DayOfYear: rename_func("DAYOFYEAR"),
            exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
            exp.ILike: no_ilike_sql,
            exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
            exp.Max: max_or_greatest,
            exp.Min: min_or_least,
            # MySQL's NULL-safe comparison operator.
            exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
            exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
            exp.StrPosition: strposition_to_locate_sql,
            exp.StrToDate: _str_to_date_sql,
            exp.StrToTime: _str_to_date_sql,
            exp.TableSample: no_tablesample_sql,
            exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TimeToStr: lambda self, e: self.func("DATE_FORMAT", e.this, self.format_time(e)),
            exp.Trim: _trim_sql,
            exp.TryCast: no_trycast_sql,
            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
        }

        # These text/blob types are native to MySQL, so drop them from the
        # remapping table and render their own names.
        TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.TransientProperty: exp.Properties.Location.UNSUPPORTED,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        # Render FETCH clauses using LIMIT.
        LIMIT_FETCH = "LIMIT"

        def show_sql(self, expression: exp.Show) -> str:
            """Render an ``exp.Show`` node back to MySQL SHOW syntax."""
            this = f" {expression.name}"
            full = " FULL" if expression.args.get("full") else ""
            global_ = " GLOBAL" if expression.args.get("global") else ""

            target = self.sql(expression, "target")
            target = f" {target}" if target else ""
            # Some variants reintroduce the keyword consumed during parsing.
            if expression.name in {"COLUMNS", "INDEX"}:
                target = f" FROM{target}"
            elif expression.name == "GRANTS":
                target = f" FOR{target}"

            db = self._prefixed_sql("FROM", expression, "db")

            like = self._prefixed_sql("LIKE", expression, "like")
            where = self.sql(expression, "where")

            types = self.expressions(expression, key="types")
            types = f" {types}" if types else types
            query = self._prefixed_sql("FOR QUERY", expression, "query")

            if expression.name == "PROFILE":
                offset = self._prefixed_sql("OFFSET", expression, "offset")
                limit = self._prefixed_sql("LIMIT", expression, "limit")
            else:
                offset = ""
                limit = self._oldstyle_limit_sql(expression)

            log = self._prefixed_sql("IN", expression, "log")
            position = self._prefixed_sql("FROM", expression, "position")

            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")

            if expression.name == "ENGINE":
                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
            else:
                mutex_or_status = ""

            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"

        def _prefixed_sql(self, prefix: str, expression: exp.Expression, arg: str) -> str:
            """Render ``arg`` preceded by ``prefix``, or "" when absent."""
            sql = self.sql(expression, arg)
            if not sql:
                return ""
            return f" {prefix} {sql}"

        def _oldstyle_limit_sql(self, expression: exp.Show) -> str:
            """Render MySQL's old-style ``LIMIT [offset,] count`` clause."""
            limit = self.sql(expression, "limit")
            offset = self.sql(expression, "offset")
            if limit:
                limit_offset = f"{offset}, {limit}" if offset else limit
                return f" LIMIT {limit_offset}"
            return ""
class MySQL(sqlglot.dialects.dialect.Dialect):
 94class MySQL(Dialect):
 95    time_format = "'%Y-%m-%d %T'"
 96
 97    # https://prestodb.io/docs/current/functions/datetime.html#mysql-date-functions
 98    time_mapping = {
 99        "%M": "%B",
100        "%c": "%-m",
101        "%e": "%-d",
102        "%h": "%I",
103        "%i": "%M",
104        "%s": "%S",
105        "%S": "%S",
106        "%u": "%W",
107        "%k": "%-H",
108        "%l": "%-I",
109        "%T": "%H:%M:%S",
110        "%W": "%a",
111    }
112
113    class Tokenizer(tokens.Tokenizer):
114        QUOTES = ["'", '"']
115        COMMENTS = ["--", "#", ("/*", "*/")]
116        IDENTIFIERS = ["`"]
117        STRING_ESCAPES = ["'", "\\"]
118        BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
119        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]
120
121        KEYWORDS = {
122            **tokens.Tokenizer.KEYWORDS,
123            "CHARSET": TokenType.CHARACTER_SET,
124            "LONGBLOB": TokenType.LONGBLOB,
125            "LONGTEXT": TokenType.LONGTEXT,
126            "MEDIUMBLOB": TokenType.MEDIUMBLOB,
127            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
128            "SEPARATOR": TokenType.SEPARATOR,
129            "START": TokenType.BEGIN,
130            "_ARMSCII8": TokenType.INTRODUCER,
131            "_ASCII": TokenType.INTRODUCER,
132            "_BIG5": TokenType.INTRODUCER,
133            "_BINARY": TokenType.INTRODUCER,
134            "_CP1250": TokenType.INTRODUCER,
135            "_CP1251": TokenType.INTRODUCER,
136            "_CP1256": TokenType.INTRODUCER,
137            "_CP1257": TokenType.INTRODUCER,
138            "_CP850": TokenType.INTRODUCER,
139            "_CP852": TokenType.INTRODUCER,
140            "_CP866": TokenType.INTRODUCER,
141            "_CP932": TokenType.INTRODUCER,
142            "_DEC8": TokenType.INTRODUCER,
143            "_EUCJPMS": TokenType.INTRODUCER,
144            "_EUCKR": TokenType.INTRODUCER,
145            "_GB18030": TokenType.INTRODUCER,
146            "_GB2312": TokenType.INTRODUCER,
147            "_GBK": TokenType.INTRODUCER,
148            "_GEOSTD8": TokenType.INTRODUCER,
149            "_GREEK": TokenType.INTRODUCER,
150            "_HEBREW": TokenType.INTRODUCER,
151            "_HP8": TokenType.INTRODUCER,
152            "_KEYBCS2": TokenType.INTRODUCER,
153            "_KOI8R": TokenType.INTRODUCER,
154            "_KOI8U": TokenType.INTRODUCER,
155            "_LATIN1": TokenType.INTRODUCER,
156            "_LATIN2": TokenType.INTRODUCER,
157            "_LATIN5": TokenType.INTRODUCER,
158            "_LATIN7": TokenType.INTRODUCER,
159            "_MACCE": TokenType.INTRODUCER,
160            "_MACROMAN": TokenType.INTRODUCER,
161            "_SJIS": TokenType.INTRODUCER,
162            "_SWE7": TokenType.INTRODUCER,
163            "_TIS620": TokenType.INTRODUCER,
164            "_UCS2": TokenType.INTRODUCER,
165            "_UJIS": TokenType.INTRODUCER,
166            # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
167            "_UTF8": TokenType.INTRODUCER,
168            "_UTF16": TokenType.INTRODUCER,
169            "_UTF16LE": TokenType.INTRODUCER,
170            "_UTF32": TokenType.INTRODUCER,
171            "_UTF8MB3": TokenType.INTRODUCER,
172            "_UTF8MB4": TokenType.INTRODUCER,
173            "@@": TokenType.SESSION_PARAMETER,
174        }
175
176        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SHOW}
177
178    class Parser(parser.Parser):
179        FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA, TokenType.DATABASE}  # type: ignore
180
181        FUNCTIONS = {
182            **parser.Parser.FUNCTIONS,  # type: ignore
183            "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
184            "DATE_FORMAT": format_time_lambda(exp.TimeToStr, "mysql"),
185            "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
186            "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
187            "LEFT": lambda args: exp.Substring(
188                this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
189            ),
190            "LOCATE": locate_to_strposition,
191            "STR_TO_DATE": _str_to_date,
192        }
193
194        FUNCTION_PARSERS = {
195            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
196            "GROUP_CONCAT": lambda self: self.expression(
197                exp.GroupConcat,
198                this=self._parse_lambda(),
199                separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
200            ),
201        }
202
203        STATEMENT_PARSERS = {
204            **parser.Parser.STATEMENT_PARSERS,  # type: ignore
205            TokenType.SHOW: lambda self: self._parse_show(),
206        }
207
208        SHOW_PARSERS = {
209            "BINARY LOGS": _show_parser("BINARY LOGS"),
210            "MASTER LOGS": _show_parser("BINARY LOGS"),
211            "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
212            "CHARACTER SET": _show_parser("CHARACTER SET"),
213            "CHARSET": _show_parser("CHARACTER SET"),
214            "COLLATION": _show_parser("COLLATION"),
215            "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
216            "COLUMNS": _show_parser("COLUMNS", target="FROM"),
217            "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
218            "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
219            "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
220            "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
221            "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
222            "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
223            "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
224            "DATABASES": _show_parser("DATABASES"),
225            "ENGINE": _show_parser("ENGINE", target=True),
226            "STORAGE ENGINES": _show_parser("ENGINES"),
227            "ENGINES": _show_parser("ENGINES"),
228            "ERRORS": _show_parser("ERRORS"),
229            "EVENTS": _show_parser("EVENTS"),
230            "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
231            "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
232            "GRANTS": _show_parser("GRANTS", target="FOR"),
233            "INDEX": _show_parser("INDEX", target="FROM"),
234            "MASTER STATUS": _show_parser("MASTER STATUS"),
235            "OPEN TABLES": _show_parser("OPEN TABLES"),
236            "PLUGINS": _show_parser("PLUGINS"),
237            "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
238            "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
239            "PRIVILEGES": _show_parser("PRIVILEGES"),
240            "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
241            "PROCESSLIST": _show_parser("PROCESSLIST"),
242            "PROFILE": _show_parser("PROFILE"),
243            "PROFILES": _show_parser("PROFILES"),
244            "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
245            "REPLICAS": _show_parser("REPLICAS"),
246            "SLAVE HOSTS": _show_parser("REPLICAS"),
247            "REPLICA STATUS": _show_parser("REPLICA STATUS"),
248            "SLAVE STATUS": _show_parser("REPLICA STATUS"),
249            "GLOBAL STATUS": _show_parser("STATUS", global_=True),
250            "SESSION STATUS": _show_parser("STATUS"),
251            "STATUS": _show_parser("STATUS"),
252            "TABLE STATUS": _show_parser("TABLE STATUS"),
253            "FULL TABLES": _show_parser("TABLES", full=True),
254            "TABLES": _show_parser("TABLES"),
255            "TRIGGERS": _show_parser("TRIGGERS"),
256            "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
257            "SESSION VARIABLES": _show_parser("VARIABLES"),
258            "VARIABLES": _show_parser("VARIABLES"),
259            "WARNINGS": _show_parser("WARNINGS"),
260        }
261
262        SET_PARSERS = {
263            **parser.Parser.SET_PARSERS,
264            "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
265            "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
266            "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
267            "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
268            "NAMES": lambda self: self._parse_set_item_names(),
269        }
270
271        PROFILE_TYPES = {
272            "ALL",
273            "BLOCK IO",
274            "CONTEXT SWITCHES",
275            "CPU",
276            "IPC",
277            "MEMORY",
278            "PAGE FAULTS",
279            "SOURCE",
280            "SWAPS",
281        }
282
283        LOG_DEFAULTS_TO_LN = True
284
285        def _parse_show_mysql(self, this, target=False, full=None, global_=None):
286            if target:
287                if isinstance(target, str):
288                    self._match_text_seq(target)
289                target_id = self._parse_id_var()
290            else:
291                target_id = None
292
293            log = self._parse_string() if self._match_text_seq("IN") else None
294
295            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
296                position = self._parse_number() if self._match_text_seq("FROM") else None
297                db = None
298            else:
299                position = None
300                db = None
301
302                if self._match(TokenType.FROM):
303                    db = self._parse_id_var()
304                elif self._match(TokenType.DOT):
305                    db = target_id
306                    target_id = self._parse_id_var()
307
308            channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None
309
310            like = self._parse_string() if self._match_text_seq("LIKE") else None
311            where = self._parse_where()
312
313            if this == "PROFILE":
314                types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
315                query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
316                offset = self._parse_number() if self._match_text_seq("OFFSET") else None
317                limit = self._parse_number() if self._match_text_seq("LIMIT") else None
318            else:
319                types, query = None, None
320                offset, limit = self._parse_oldstyle_limit()
321
322            mutex = True if self._match_text_seq("MUTEX") else None
323            mutex = False if self._match_text_seq("STATUS") else mutex
324
325            return self.expression(
326                exp.Show,
327                this=this,
328                target=target_id,
329                full=full,
330                log=log,
331                position=position,
332                db=db,
333                channel=channel,
334                like=like,
335                where=where,
336                types=types,
337                query=query,
338                offset=offset,
339                limit=limit,
340                mutex=mutex,
341                **{"global": global_},
342            )
343
344        def _parse_oldstyle_limit(self):
345            limit = None
346            offset = None
347            if self._match_text_seq("LIMIT"):
348                parts = self._parse_csv(self._parse_number)
349                if len(parts) == 1:
350                    limit = parts[0]
351                elif len(parts) == 2:
352                    limit = parts[1]
353                    offset = parts[0]
354            return offset, limit
355
356        def _parse_set_item_charset(self, kind):
357            this = self._parse_string() or self._parse_id_var()
358
359            return self.expression(
360                exp.SetItem,
361                this=this,
362                kind=kind,
363            )
364
365        def _parse_set_item_names(self):
366            charset = self._parse_string() or self._parse_id_var()
367            if self._match_text_seq("COLLATE"):
368                collate = self._parse_string() or self._parse_id_var()
369            else:
370                collate = None
371            return self.expression(
372                exp.SetItem,
373                this=charset,
374                collate=collate,
375                kind="NAMES",
376            )
377
    class Generator(generator.Generator):
        """Generator that renders sqlglot expression trees as MySQL SQL."""

        # MySQL supports locking reads (SELECT ... FOR UPDATE etc.).
        LOCKING_READS_SUPPORTED = True
        # ORDER BY has no explicit NULLS FIRST / NULLS LAST modifier.
        NULL_ORDERING_SUPPORTED = False
        JOIN_HINTS = False
        TABLE_HINTS = False

        # Expression-type -> SQL rendering overrides for MySQL.
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.CurrentDate: no_paren_current_date_sql,
            exp.DateDiff: lambda self, e: self.func("DATEDIFF", e.this, e.expression),
            exp.DateAdd: _date_add_sql("ADD"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.DateSub: _date_add_sql("SUB"),
            exp.DateTrunc: _date_trunc_sql,
            exp.DayOfMonth: rename_func("DAYOFMONTH"),
            exp.DayOfWeek: rename_func("DAYOFWEEK"),
            exp.DayOfYear: rename_func("DAYOFYEAR"),
            # Falls back to a ',' separator when none was specified.
            exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
            exp.ILike: no_ilike_sql,
            exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
            exp.Max: max_or_greatest,
            exp.Min: min_or_least,
            # <=> is MySQL's NULL-safe equality operator.
            exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
            exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
            exp.StrPosition: strposition_to_locate_sql,
            exp.StrToDate: _str_to_date_sql,
            exp.StrToTime: _str_to_date_sql,
            exp.TableSample: no_tablesample_sql,
            exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TimeToStr: lambda self, e: self.func("DATE_FORMAT", e.this, self.format_time(e)),
            exp.Trim: _trim_sql,
            exp.TryCast: no_trycast_sql,
            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
        }

        # Drop the base generator's remappings so these MySQL-native types are
        # rendered under their own names rather than the generic substitutes.
        TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)

        # These properties have no MySQL equivalent.
        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.TransientProperty: exp.Properties.Location.UNSUPPORTED,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        # Render FETCH-style clauses with the LIMIT keyword.
        LIMIT_FETCH = "LIMIT"

        def show_sql(self, expression: exp.Show) -> str:
            """Render an exp.Show node back into a MySQL SHOW statement."""
            this = f" {expression.name}"
            full = " FULL" if expression.args.get("full") else ""
            global_ = " GLOBAL" if expression.args.get("global") else ""

            target = self.sql(expression, "target")
            target = f" {target}" if target else ""
            # SHOW COLUMNS/INDEX take "FROM <table>"; SHOW GRANTS takes "FOR <user>".
            if expression.name in {"COLUMNS", "INDEX"}:
                target = f" FROM{target}"
            elif expression.name == "GRANTS":
                target = f" FOR{target}"

            db = self._prefixed_sql("FROM", expression, "db")

            like = self._prefixed_sql("LIKE", expression, "like")
            where = self.sql(expression, "where")

            types = self.expressions(expression, key="types")
            types = f" {types}" if types else types
            query = self._prefixed_sql("FOR QUERY", expression, "query")

            # SHOW PROFILE uses explicit OFFSET/LIMIT keywords; everything else
            # uses the old-style "LIMIT [offset,] count" form.
            if expression.name == "PROFILE":
                offset = self._prefixed_sql("OFFSET", expression, "offset")
                limit = self._prefixed_sql("LIMIT", expression, "limit")
            else:
                offset = ""
                limit = self._oldstyle_limit_sql(expression)

            log = self._prefixed_sql("IN", expression, "log")
            position = self._prefixed_sql("FROM", expression, "position")

            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")

            # SHOW ENGINE requires a trailing MUTEX or STATUS keyword.
            if expression.name == "ENGINE":
                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
            else:
                mutex_or_status = ""

            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"

        def _prefixed_sql(self, prefix: str, expression: exp.Expression, arg: str) -> str:
            """Render `arg` preceded by `prefix`, or "" when the arg is absent."""
            sql = self.sql(expression, arg)
            if not sql:
                return ""
            return f" {prefix} {sql}"

        def _oldstyle_limit_sql(self, expression: exp.Show) -> str:
            """Render the legacy ``LIMIT [offset,] count`` clause, or ""."""
            limit = self.sql(expression, "limit")
            offset = self.sql(expression, "offset")
            if limit:
                limit_offset = f"{offset}, {limit}" if offset else limit
                return f" LIMIT {limit_offset}"
            return ""
class MySQL.Tokenizer(sqlglot.tokens.Tokenizer):
113    class Tokenizer(tokens.Tokenizer):
114        QUOTES = ["'", '"']
115        COMMENTS = ["--", "#", ("/*", "*/")]
116        IDENTIFIERS = ["`"]
117        STRING_ESCAPES = ["'", "\\"]
118        BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
119        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]
120
121        KEYWORDS = {
122            **tokens.Tokenizer.KEYWORDS,
123            "CHARSET": TokenType.CHARACTER_SET,
124            "LONGBLOB": TokenType.LONGBLOB,
125            "LONGTEXT": TokenType.LONGTEXT,
126            "MEDIUMBLOB": TokenType.MEDIUMBLOB,
127            "MEDIUMTEXT": TokenType.MEDIUMTEXT,
128            "SEPARATOR": TokenType.SEPARATOR,
129            "START": TokenType.BEGIN,
130            "_ARMSCII8": TokenType.INTRODUCER,
131            "_ASCII": TokenType.INTRODUCER,
132            "_BIG5": TokenType.INTRODUCER,
133            "_BINARY": TokenType.INTRODUCER,
134            "_CP1250": TokenType.INTRODUCER,
135            "_CP1251": TokenType.INTRODUCER,
136            "_CP1256": TokenType.INTRODUCER,
137            "_CP1257": TokenType.INTRODUCER,
138            "_CP850": TokenType.INTRODUCER,
139            "_CP852": TokenType.INTRODUCER,
140            "_CP866": TokenType.INTRODUCER,
141            "_CP932": TokenType.INTRODUCER,
142            "_DEC8": TokenType.INTRODUCER,
143            "_EUCJPMS": TokenType.INTRODUCER,
144            "_EUCKR": TokenType.INTRODUCER,
145            "_GB18030": TokenType.INTRODUCER,
146            "_GB2312": TokenType.INTRODUCER,
147            "_GBK": TokenType.INTRODUCER,
148            "_GEOSTD8": TokenType.INTRODUCER,
149            "_GREEK": TokenType.INTRODUCER,
150            "_HEBREW": TokenType.INTRODUCER,
151            "_HP8": TokenType.INTRODUCER,
152            "_KEYBCS2": TokenType.INTRODUCER,
153            "_KOI8R": TokenType.INTRODUCER,
154            "_KOI8U": TokenType.INTRODUCER,
155            "_LATIN1": TokenType.INTRODUCER,
156            "_LATIN2": TokenType.INTRODUCER,
157            "_LATIN5": TokenType.INTRODUCER,
158            "_LATIN7": TokenType.INTRODUCER,
159            "_MACCE": TokenType.INTRODUCER,
160            "_MACROMAN": TokenType.INTRODUCER,
161            "_SJIS": TokenType.INTRODUCER,
162            "_SWE7": TokenType.INTRODUCER,
163            "_TIS620": TokenType.INTRODUCER,
164            "_UCS2": TokenType.INTRODUCER,
165            "_UJIS": TokenType.INTRODUCER,
166            # https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
167            "_UTF8": TokenType.INTRODUCER,
168            "_UTF16": TokenType.INTRODUCER,
169            "_UTF16LE": TokenType.INTRODUCER,
170            "_UTF32": TokenType.INTRODUCER,
171            "_UTF8MB3": TokenType.INTRODUCER,
172            "_UTF8MB4": TokenType.INTRODUCER,
173            "@@": TokenType.SESSION_PARAMETER,
174        }
175
176        COMMANDS = tokens.Tokenizer.COMMANDS - {TokenType.SHOW}
class MySQL.Parser(sqlglot.parser.Parser):
    class Parser(parser.Parser):
        """Parser that handles MySQL-specific syntax (SHOW, SET, functions)."""

        # Allow SCHEMA() and DATABASE() to parse as function calls.
        FUNC_TOKENS = {*parser.Parser.FUNC_TOKENS, TokenType.SCHEMA, TokenType.DATABASE}  # type: ignore

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "DATE_ADD": parse_date_delta_with_interval(exp.DateAdd),
            "DATE_FORMAT": format_time_lambda(exp.TimeToStr, "mysql"),
            "DATE_SUB": parse_date_delta_with_interval(exp.DateSub),
            # INSTR(haystack, needle) -> StrPosition(this=haystack, substr=needle)
            "INSTR": lambda args: exp.StrPosition(substr=seq_get(args, 1), this=seq_get(args, 0)),
            # LEFT(s, n) is equivalent to SUBSTRING(s, 1, n).
            "LEFT": lambda args: exp.Substring(
                this=seq_get(args, 0), start=exp.Literal.number(1), length=seq_get(args, 1)
            ),
            "LOCATE": locate_to_strposition,
            "STR_TO_DATE": _str_to_date,
        }

        FUNCTION_PARSERS = {
            **parser.Parser.FUNCTION_PARSERS,  # type: ignore
            # GROUP_CONCAT(expr [SEPARATOR sep]) needs a custom parser for the
            # optional SEPARATOR keyword.
            "GROUP_CONCAT": lambda self: self.expression(
                exp.GroupConcat,
                this=self._parse_lambda(),
                separator=self._match(TokenType.SEPARATOR) and self._parse_field(),
            ),
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,  # type: ignore
            TokenType.SHOW: lambda self: self._parse_show(),
        }

        # Maps the keyword(s) following SHOW to a parser for that variant.
        # Synonyms (e.g. MASTER LOGS / BINARY LOGS) map to the same canonical name.
        SHOW_PARSERS = {
            "BINARY LOGS": _show_parser("BINARY LOGS"),
            "MASTER LOGS": _show_parser("BINARY LOGS"),
            "BINLOG EVENTS": _show_parser("BINLOG EVENTS"),
            "CHARACTER SET": _show_parser("CHARACTER SET"),
            "CHARSET": _show_parser("CHARACTER SET"),
            "COLLATION": _show_parser("COLLATION"),
            "FULL COLUMNS": _show_parser("COLUMNS", target="FROM", full=True),
            "COLUMNS": _show_parser("COLUMNS", target="FROM"),
            "CREATE DATABASE": _show_parser("CREATE DATABASE", target=True),
            "CREATE EVENT": _show_parser("CREATE EVENT", target=True),
            "CREATE FUNCTION": _show_parser("CREATE FUNCTION", target=True),
            "CREATE PROCEDURE": _show_parser("CREATE PROCEDURE", target=True),
            "CREATE TABLE": _show_parser("CREATE TABLE", target=True),
            "CREATE TRIGGER": _show_parser("CREATE TRIGGER", target=True),
            "CREATE VIEW": _show_parser("CREATE VIEW", target=True),
            "DATABASES": _show_parser("DATABASES"),
            "ENGINE": _show_parser("ENGINE", target=True),
            "STORAGE ENGINES": _show_parser("ENGINES"),
            "ENGINES": _show_parser("ENGINES"),
            "ERRORS": _show_parser("ERRORS"),
            "EVENTS": _show_parser("EVENTS"),
            "FUNCTION CODE": _show_parser("FUNCTION CODE", target=True),
            "FUNCTION STATUS": _show_parser("FUNCTION STATUS"),
            "GRANTS": _show_parser("GRANTS", target="FOR"),
            "INDEX": _show_parser("INDEX", target="FROM"),
            "MASTER STATUS": _show_parser("MASTER STATUS"),
            "OPEN TABLES": _show_parser("OPEN TABLES"),
            "PLUGINS": _show_parser("PLUGINS"),
            "PROCEDURE CODE": _show_parser("PROCEDURE CODE", target=True),
            "PROCEDURE STATUS": _show_parser("PROCEDURE STATUS"),
            "PRIVILEGES": _show_parser("PRIVILEGES"),
            "FULL PROCESSLIST": _show_parser("PROCESSLIST", full=True),
            "PROCESSLIST": _show_parser("PROCESSLIST"),
            "PROFILE": _show_parser("PROFILE"),
            "PROFILES": _show_parser("PROFILES"),
            "RELAYLOG EVENTS": _show_parser("RELAYLOG EVENTS"),
            "REPLICAS": _show_parser("REPLICAS"),
            "SLAVE HOSTS": _show_parser("REPLICAS"),
            "REPLICA STATUS": _show_parser("REPLICA STATUS"),
            "SLAVE STATUS": _show_parser("REPLICA STATUS"),
            "GLOBAL STATUS": _show_parser("STATUS", global_=True),
            "SESSION STATUS": _show_parser("STATUS"),
            "STATUS": _show_parser("STATUS"),
            "TABLE STATUS": _show_parser("TABLE STATUS"),
            "FULL TABLES": _show_parser("TABLES", full=True),
            "TABLES": _show_parser("TABLES"),
            "TRIGGERS": _show_parser("TRIGGERS"),
            "GLOBAL VARIABLES": _show_parser("VARIABLES", global_=True),
            "SESSION VARIABLES": _show_parser("VARIABLES"),
            "VARIABLES": _show_parser("VARIABLES"),
            "WARNINGS": _show_parser("WARNINGS"),
        }

        SET_PARSERS = {
            **parser.Parser.SET_PARSERS,
            "PERSIST": lambda self: self._parse_set_item_assignment("PERSIST"),
            "PERSIST_ONLY": lambda self: self._parse_set_item_assignment("PERSIST_ONLY"),
            "CHARACTER SET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "CHARSET": lambda self: self._parse_set_item_charset("CHARACTER SET"),
            "NAMES": lambda self: self._parse_set_item_names(),
        }

        # Valid type names accepted by SHOW PROFILE.
        PROFILE_TYPES = {
            "ALL",
            "BLOCK IO",
            "CONTEXT SWITCHES",
            "CPU",
            "IPC",
            "MEMORY",
            "PAGE FAULTS",
            "SOURCE",
            "SWAPS",
        }

        # In MySQL, LOG(x) is the natural logarithm.
        LOG_DEFAULTS_TO_LN = True

        def _parse_show_mysql(self, this, target=False, full=None, global_=None):
            """Parse the body of a SHOW statement into an exp.Show.

            Args:
                this: the canonical SHOW variant name, e.g. "COLUMNS", "PROFILE".
                target: when truthy, parse a target identifier; when given as a
                    string, that keyword (e.g. "FROM") is consumed first if present.
                full: True when the caller already consumed a FULL modifier.
                global_: True when the caller already consumed a GLOBAL modifier.
            """
            if target:
                if isinstance(target, str):
                    self._match_text_seq(target)
                target_id = self._parse_id_var()
            else:
                target_id = None

            # e.g. SHOW BINLOG EVENTS IN 'log_name'
            log = self._parse_string() if self._match_text_seq("IN") else None

            if this in {"BINLOG EVENTS", "RELAYLOG EVENTS"}:
                # ... FROM <position> (a numeric log position)
                position = self._parse_number() if self._match_text_seq("FROM") else None
                db = None
            else:
                position = None
                db = None

                if self._match(TokenType.FROM):
                    db = self._parse_id_var()
                elif self._match(TokenType.DOT):
                    # "db.table" form: what we parsed as the target was really
                    # the database qualifier.
                    db = target_id
                    target_id = self._parse_id_var()

            channel = self._parse_id_var() if self._match_text_seq("FOR", "CHANNEL") else None

            like = self._parse_string() if self._match_text_seq("LIKE") else None
            where = self._parse_where()

            if this == "PROFILE":
                # SHOW PROFILE [type,...] [FOR QUERY n] [OFFSET n] [LIMIT n]
                types = self._parse_csv(lambda: self._parse_var_from_options(self.PROFILE_TYPES))
                query = self._parse_number() if self._match_text_seq("FOR", "QUERY") else None
                offset = self._parse_number() if self._match_text_seq("OFFSET") else None
                limit = self._parse_number() if self._match_text_seq("LIMIT") else None
            else:
                types, query = None, None
                offset, limit = self._parse_oldstyle_limit()

            # SHOW ENGINE ... {MUTEX | STATUS}: True -> MUTEX, False -> STATUS,
            # None -> neither keyword was present.
            mutex = True if self._match_text_seq("MUTEX") else None
            mutex = False if self._match_text_seq("STATUS") else mutex

            return self.expression(
                exp.Show,
                this=this,
                target=target_id,
                full=full,
                log=log,
                position=position,
                db=db,
                channel=channel,
                like=like,
                where=where,
                types=types,
                query=query,
                offset=offset,
                limit=limit,
                mutex=mutex,
                **{"global": global_},
            )

        def _parse_oldstyle_limit(self):
            """Parse the legacy ``LIMIT [offset,] row_count`` clause.

            Returns a ``(offset, limit)`` pair; both None if LIMIT is absent.
            """
            limit = None
            offset = None
            if self._match_text_seq("LIMIT"):
                parts = self._parse_csv(self._parse_number)
                if len(parts) == 1:
                    limit = parts[0]
                elif len(parts) == 2:
                    # Old-style form: LIMIT offset, row_count
                    limit = parts[1]
                    offset = parts[0]
            return offset, limit

        def _parse_set_item_charset(self, kind):
            """Parse ``SET CHARACTER SET <charset>`` into an exp.SetItem."""
            # The charset may be a string literal or a bare identifier.
            this = self._parse_string() or self._parse_id_var()

            return self.expression(
                exp.SetItem,
                this=this,
                kind=kind,
            )

        def _parse_set_item_names(self):
            """Parse ``SET NAMES <charset> [COLLATE <collation>]`` into an exp.SetItem."""
            charset = self._parse_string() or self._parse_id_var()
            if self._match_text_seq("COLLATE"):
                collate = self._parse_string() or self._parse_id_var()
            else:
                collate = None
            return self.expression(
                exp.SetItem,
                this=charset,
                collate=collate,
                kind="NAMES",
            )

Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer and produces a parsed syntax tree.

Arguments:
  • error_level: the desired error level. Default: ErrorLevel.RAISE
  • error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
  • index_offset: Index offset for arrays, e.g. ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
  • alias_post_tablesample: If the table alias comes after tablesample. Default: False
  • max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
  • null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
class MySQL.Generator(sqlglot.generator.Generator):
    class Generator(generator.Generator):
        """Generator that renders sqlglot expression trees as MySQL SQL."""

        # MySQL supports locking reads (SELECT ... FOR UPDATE etc.).
        LOCKING_READS_SUPPORTED = True
        # ORDER BY has no explicit NULLS FIRST / NULLS LAST modifier.
        NULL_ORDERING_SUPPORTED = False
        JOIN_HINTS = False
        TABLE_HINTS = False

        # Expression-type -> SQL rendering overrides for MySQL.
        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.CurrentDate: no_paren_current_date_sql,
            exp.DateDiff: lambda self, e: self.func("DATEDIFF", e.this, e.expression),
            exp.DateAdd: _date_add_sql("ADD"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.DateSub: _date_add_sql("SUB"),
            exp.DateTrunc: _date_trunc_sql,
            exp.DayOfMonth: rename_func("DAYOFMONTH"),
            exp.DayOfWeek: rename_func("DAYOFWEEK"),
            exp.DayOfYear: rename_func("DAYOFYEAR"),
            # Falls back to a ',' separator when none was specified.
            exp.GroupConcat: lambda self, e: f"""GROUP_CONCAT({self.sql(e, "this")} SEPARATOR {self.sql(e, "separator") or "','"})""",
            exp.ILike: no_ilike_sql,
            exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
            exp.Max: max_or_greatest,
            exp.Min: min_or_least,
            # <=> is MySQL's NULL-safe equality operator.
            exp.NullSafeEQ: lambda self, e: self.binary(e, "<=>"),
            exp.NullSafeNEQ: lambda self, e: self.not_sql(self.binary(e, "<=>")),
            exp.Select: transforms.preprocess([transforms.eliminate_distinct_on]),
            exp.StrPosition: strposition_to_locate_sql,
            exp.StrToDate: _str_to_date_sql,
            exp.StrToTime: _str_to_date_sql,
            exp.TableSample: no_tablesample_sql,
            exp.TimeStrToUnix: rename_func("UNIX_TIMESTAMP"),
            exp.TimeToStr: lambda self, e: self.func("DATE_FORMAT", e.this, self.format_time(e)),
            exp.Trim: _trim_sql,
            exp.TryCast: no_trycast_sql,
            exp.WeekOfYear: rename_func("WEEKOFYEAR"),
        }

        # Drop the base generator's remappings so these MySQL-native types are
        # rendered under their own names rather than the generic substitutes.
        TYPE_MAPPING = generator.Generator.TYPE_MAPPING.copy()
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGTEXT)
        TYPE_MAPPING.pop(exp.DataType.Type.MEDIUMBLOB)
        TYPE_MAPPING.pop(exp.DataType.Type.LONGBLOB)

        # These properties have no MySQL equivalent.
        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,  # type: ignore
            exp.TransientProperty: exp.Properties.Location.UNSUPPORTED,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        # Render FETCH-style clauses with the LIMIT keyword.
        LIMIT_FETCH = "LIMIT"

        def show_sql(self, expression: exp.Show) -> str:
            """Render an exp.Show node back into a MySQL SHOW statement."""
            this = f" {expression.name}"
            full = " FULL" if expression.args.get("full") else ""
            global_ = " GLOBAL" if expression.args.get("global") else ""

            target = self.sql(expression, "target")
            target = f" {target}" if target else ""
            # SHOW COLUMNS/INDEX take "FROM <table>"; SHOW GRANTS takes "FOR <user>".
            if expression.name in {"COLUMNS", "INDEX"}:
                target = f" FROM{target}"
            elif expression.name == "GRANTS":
                target = f" FOR{target}"

            db = self._prefixed_sql("FROM", expression, "db")

            like = self._prefixed_sql("LIKE", expression, "like")
            where = self.sql(expression, "where")

            types = self.expressions(expression, key="types")
            types = f" {types}" if types else types
            query = self._prefixed_sql("FOR QUERY", expression, "query")

            # SHOW PROFILE uses explicit OFFSET/LIMIT keywords; everything else
            # uses the old-style "LIMIT [offset,] count" form.
            if expression.name == "PROFILE":
                offset = self._prefixed_sql("OFFSET", expression, "offset")
                limit = self._prefixed_sql("LIMIT", expression, "limit")
            else:
                offset = ""
                limit = self._oldstyle_limit_sql(expression)

            log = self._prefixed_sql("IN", expression, "log")
            position = self._prefixed_sql("FROM", expression, "position")

            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")

            # SHOW ENGINE requires a trailing MUTEX or STATUS keyword.
            if expression.name == "ENGINE":
                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
            else:
                mutex_or_status = ""

            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"

        def _prefixed_sql(self, prefix: str, expression: exp.Expression, arg: str) -> str:
            """Render `arg` preceded by `prefix`, or "" when the arg is absent."""
            sql = self.sql(expression, arg)
            if not sql:
                return ""
            return f" {prefix} {sql}"

        def _oldstyle_limit_sql(self, expression: exp.Show) -> str:
            """Render the legacy ``LIMIT [offset,] count`` clause, or ""."""
            limit = self.sql(expression, "limit")
            offset = self.sql(expression, "offset")
            if limit:
                limit_offset = f"{offset}, {limit}" if offset else limit
                return f" LIMIT {limit_offset}"
            return ""

Generator interprets the given syntax tree and produces a SQL string as an output.

Arguments:
  • time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the output the target time format
  • time_trie (trie): a trie of the time_mapping keys
  • pretty (bool): if set to True the returned string will be formatted. Default: False.
  • quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
  • quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
  • identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
  • identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
  • bit_start (str): specifies which starting character to use to delimit bit literals. Default: None.
  • bit_end (str): specifies which ending character to use to delimit bit literals. Default: None.
  • hex_start (str): specifies which starting character to use to delimit hex literals. Default: None.
  • hex_end (str): specifies which ending character to use to delimit hex literals. Default: None.
  • byte_start (str): specifies which starting character to use to delimit byte literals. Default: None.
  • byte_end (str): specifies which ending character to use to delimit byte literals. Default: None.
  • identify (bool | str): 'always': always quote, 'safe': quote identifiers if they don't contain an upcase, True defaults to always.
  • normalize (bool): if set to True all identifiers will lower cased
  • string_escape (str): specifies a string escape character. Default: '.
  • identifier_escape (str): specifies an identifier escape character. Default: ".
  • pad (int): determines padding in a formatted string. Default: 2.
  • indent (int): determines the size of indentation in a formatted string. Default: 4.
  • unnest_column_only (bool): if true unnest table aliases are considered only as column aliases
  • normalize_functions (str): normalize function names, "upper", "lower", or None Default: "upper"
  • alias_post_tablesample (bool): if the table alias comes after tablesample Default: False
  • unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default ErrorLevel.WARN.
  • null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
  • max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
  • leading_comma (bool): if the comma is leading or trailing in select statements. Default: False
  • max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
  • comments: Whether or not to preserve comments in the output SQL code. Default: True
def show_sql(self, expression: sqlglot.expressions.Show) -> str:
428        def show_sql(self, expression: exp.Show) -> str:
429            this = f" {expression.name}"
430            full = " FULL" if expression.args.get("full") else ""
431            global_ = " GLOBAL" if expression.args.get("global") else ""
432
433            target = self.sql(expression, "target")
434            target = f" {target}" if target else ""
435            if expression.name in {"COLUMNS", "INDEX"}:
436                target = f" FROM{target}"
437            elif expression.name == "GRANTS":
438                target = f" FOR{target}"
439
440            db = self._prefixed_sql("FROM", expression, "db")
441
442            like = self._prefixed_sql("LIKE", expression, "like")
443            where = self.sql(expression, "where")
444
445            types = self.expressions(expression, key="types")
446            types = f" {types}" if types else types
447            query = self._prefixed_sql("FOR QUERY", expression, "query")
448
449            if expression.name == "PROFILE":
450                offset = self._prefixed_sql("OFFSET", expression, "offset")
451                limit = self._prefixed_sql("LIMIT", expression, "limit")
452            else:
453                offset = ""
454                limit = self._oldstyle_limit_sql(expression)
455
456            log = self._prefixed_sql("IN", expression, "log")
457            position = self._prefixed_sql("FROM", expression, "position")
458
459            channel = self._prefixed_sql("FOR CHANNEL", expression, "channel")
460
461            if expression.name == "ENGINE":
462                mutex_or_status = " MUTEX" if expression.args.get("mutex") else " STATUS"
463            else:
464                mutex_or_status = ""
465
466            return f"SHOW{full}{global_}{this}{target}{types}{db}{query}{log}{position}{channel}{mutex_or_status}{like}{where}{offset}{limit}"
Inherited Members
sqlglot.generator.Generator
Generator
generate
unsupported
sep
seg
pad_comment
maybe_comment
wrap
no_identify
normalize_func
indent
sql
uncache_sql
cache_sql
characterset_sql
column_sql
columnposition_sql
columndef_sql
columnconstraint_sql
autoincrementcolumnconstraint_sql
compresscolumnconstraint_sql
generatedasidentitycolumnconstraint_sql
notnullcolumnconstraint_sql
primarykeycolumnconstraint_sql
uniquecolumnconstraint_sql
create_sql
clone_sql
describe_sql
prepend_ctes
with_sql
cte_sql
tablealias_sql
bitstring_sql
hexstring_sql
bytestring_sql
datatypesize_sql
datatype_sql
directory_sql
delete_sql
drop_sql
except_sql
except_op
fetch_sql
filter_sql
hint_sql
index_sql
identifier_sql
inputoutputformat_sql
national_sql
partition_sql
properties_sql
root_properties
properties
with_properties
locate_properties
property_sql
likeproperty_sql
fallbackproperty_sql
journalproperty_sql
freespaceproperty_sql
afterjournalproperty_sql
checksumproperty_sql
mergeblockratioproperty_sql
datablocksizeproperty_sql
blockcompressionproperty_sql
isolatedloadingproperty_sql
lockingproperty_sql
withdataproperty_sql
insert_sql
intersect_sql
intersect_op
introducer_sql
pseudotype_sql
onconflict_sql
returning_sql
rowformatdelimitedproperty_sql
table_sql
tablesample_sql
pivot_sql
tuple_sql
update_sql
values_sql
var_sql
into_sql
from_sql
group_sql
having_sql
join_sql
lambda_sql
lateral_sql
limit_sql
offset_sql
setitem_sql
set_sql
pragma_sql
lock_sql
literal_sql
loaddata_sql
null_sql
boolean_sql
order_sql
cluster_sql
distribute_sql
sort_sql
ordered_sql
matchrecognize_sql
query_modifiers
after_having_modifiers
after_limit_modifiers
select_sql
schema_sql
star_sql
parameter_sql
sessionparameter_sql
placeholder_sql
subquery_sql
qualify_sql
union_sql
union_op
unnest_sql
where_sql
window_sql
partition_by_sql
windowspec_sql
withingroup_sql
between_sql
bracket_sql
all_sql
any_sql
exists_sql
case_sql
constraint_sql
nextvaluefor_sql
extract_sql
trim_sql
concat_sql
check_sql
foreignkey_sql
primarykey_sql
unique_sql
if_sql
matchagainst_sql
jsonkeyvalue_sql
jsonobject_sql
openjsoncolumndef_sql
openjson_sql
in_sql
in_unnest_op
interval_sql
return_sql
reference_sql
anonymous_sql
paren_sql
neg_sql
not_sql
alias_sql
aliases_sql
attimezone_sql
add_sql
and_sql
connector_sql
bitwiseand_sql
bitwiseleftshift_sql
bitwisenot_sql
bitwiseor_sql
bitwiserightshift_sql
bitwisexor_sql
cast_sql
currentdate_sql
collate_sql
command_sql
comment_sql
mergetreettlaction_sql
mergetreettl_sql
transaction_sql
commit_sql
rollback_sql
altercolumn_sql
renametable_sql
altertable_sql
droppartition_sql
addconstraint_sql
distinct_sql
ignorenulls_sql
respectnulls_sql
intdiv_sql
dpipe_sql
div_sql
overlaps_sql
distance_sql
dot_sql
eq_sql
escape_sql
glob_sql
gt_sql
gte_sql
ilike_sql
ilikeany_sql
is_sql
like_sql
likeany_sql
similarto_sql
lt_sql
lte_sql
mod_sql
mul_sql
neq_sql
nullsafeeq_sql
nullsafeneq_sql
or_sql
slice_sql
sub_sql
trycast_sql
use_sql
binary
function_fallback_sql
func
format_args
text_width
format_time
expressions
op_expressions
naked_property
set_operation
tag_sql
token_sql
userdefinedfunction_sql
joinhint_sql
kwarg_sql
when_sql
merge_sql
tochar_sql