sqlglot.dialects.sqlite
from __future__ import annotations

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    arrow_json_extract_scalar_sql,
    arrow_json_extract_sql,
    count_if_to_sum,
    no_ilike_sql,
    no_tablesample_sql,
    no_trycast_sql,
    rename_func,
)
from sqlglot.tokens import TokenType


def _date_add_sql(self, expression):
    modifier = expression.expression
    modifier = modifier.name if modifier.is_string else self.sql(modifier)
    unit = expression.args.get("unit")
    modifier = f"'{modifier} {unit.name}'" if unit else f"'{modifier}'"
    return self.func("DATE", expression.this, modifier)


def _transform_create(expression: exp.Expression) -> exp.Expression:
    """Move primary key to a column and enforce auto_increment on primary keys."""
    schema = expression.this

    if isinstance(expression, exp.Create) and isinstance(schema, exp.Schema):
        defs = {}
        primary_key = None

        for e in schema.expressions:
            if isinstance(e, exp.ColumnDef):
                defs[e.name] = e
            elif isinstance(e, exp.PrimaryKey):
                primary_key = e

        if primary_key and len(primary_key.expressions) == 1:
            column = defs[primary_key.expressions[0].name]
            column.append(
                "constraints", exp.ColumnConstraint(kind=exp.PrimaryKeyColumnConstraint())
            )
            schema.expressions.remove(primary_key)
        else:
            for column in defs.values():
                auto_increment = None
                for constraint in column.constraints.copy():
                    if isinstance(constraint.kind, exp.PrimaryKeyColumnConstraint):
                        break
                    if isinstance(constraint.kind, exp.AutoIncrementColumnConstraint):
                        auto_increment = constraint
                if auto_increment:
                    column.constraints.remove(auto_increment)

    return expression


class SQLite(Dialect):
    class Tokenizer(tokens.Tokenizer):
        IDENTIFIERS = ['"', ("[", "]"), "`"]
        HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", ""), ("0X", "")]

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
        }

    class Parser(parser.Parser):
        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,  # type: ignore
            "EDITDIST3": exp.Levenshtein.from_arg_list,
        }

    class Generator(generator.Generator):
        JOIN_HINTS = False
        TABLE_HINTS = False

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,  # type: ignore
            exp.DataType.Type.BOOLEAN: "INTEGER",
            exp.DataType.Type.TINYINT: "INTEGER",
            exp.DataType.Type.SMALLINT: "INTEGER",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.BIGINT: "INTEGER",
            exp.DataType.Type.FLOAT: "REAL",
            exp.DataType.Type.DOUBLE: "REAL",
            exp.DataType.Type.DECIMAL: "REAL",
            exp.DataType.Type.CHAR: "TEXT",
            exp.DataType.Type.NCHAR: "TEXT",
            exp.DataType.Type.VARCHAR: "TEXT",
            exp.DataType.Type.NVARCHAR: "TEXT",
            exp.DataType.Type.BINARY: "BLOB",
            exp.DataType.Type.VARBINARY: "BLOB",
        }

        TOKEN_MAPPING = {
            TokenType.AUTO_INCREMENT: "AUTOINCREMENT",
        }

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,  # type: ignore
            exp.CountIf: count_if_to_sum,
            exp.Create: transforms.preprocess([_transform_create]),
            exp.CurrentDate: lambda *_: "CURRENT_DATE",
            exp.CurrentTime: lambda *_: "CURRENT_TIME",
            exp.CurrentTimestamp: lambda *_: "CURRENT_TIMESTAMP",
            exp.DateAdd: _date_add_sql,
            exp.DateStrToDate: lambda self, e: self.sql(e, "this"),
            exp.ILike: no_ilike_sql,
            exp.JSONExtract: arrow_json_extract_sql,
            exp.JSONExtractScalar: arrow_json_extract_scalar_sql,
            exp.JSONBExtract: arrow_json_extract_sql,
            exp.JSONBExtractScalar: arrow_json_extract_scalar_sql,
            exp.Levenshtein: rename_func("EDITDIST3"),
            exp.LogicalOr: rename_func("MAX"),
            exp.LogicalAnd: rename_func("MIN"),
            exp.Select: transforms.preprocess(
                [transforms.eliminate_distinct_on, transforms.eliminate_qualify]
            ),
            exp.TableSample: no_tablesample_sql,
            exp.TimeStrToTime: lambda self, e: self.sql(e, "this"),
            exp.TryCast: no_trycast_sql,
        }

        PROPERTIES_LOCATION = {
            k: exp.Properties.Location.UNSUPPORTED
            for k, v in generator.Generator.PROPERTIES_LOCATION.items()
        }

        LIMIT_FETCH = "LIMIT"

        def cast_sql(self, expression: exp.Cast) -> str:
            if expression.to.this == exp.DataType.Type.DATE:
                return self.func("DATE", expression.this)

            return super().cast_sql(expression)

        def datediff_sql(self, expression: exp.DateDiff) -> str:
            unit = expression.args.get("unit")
            unit = unit.name.upper() if unit else "DAY"

            sql = f"(JULIANDAY({self.sql(expression, 'this')}) - JULIANDAY({self.sql(expression, 'expression')}))"

            if unit == "MONTH":
                sql = f"{sql} / 30.0"
            elif unit == "YEAR":
                sql = f"{sql} / 365.0"
            elif unit == "HOUR":
                sql = f"{sql} * 24.0"
            elif unit == "MINUTE":
                sql = f"{sql} * 1440.0"
            elif unit == "SECOND":
                sql = f"{sql} * 86400.0"
            elif unit == "MILLISECOND":
                sql = f"{sql} * 86400000.0"
            elif unit == "MICROSECOND":
                sql = f"{sql} * 86400000000.0"
            elif unit == "NANOSECOND":
                sql = f"{sql} * 8640000000000.0"
            else:
                self.unsupported(f"DATEDIFF unsupported for '{unit}'.")

            return f"CAST({sql} AS INTEGER)"

        # https://www.sqlite.org/lang_aggfunc.html#group_concat
        def groupconcat_sql(self, expression):
            this = expression.this
            distinct = expression.find(exp.Distinct)
            if distinct:
                this = distinct.expressions[0]
                distinct = "DISTINCT "

            if isinstance(expression.this, exp.Order):
                self.unsupported("SQLite GROUP_CONCAT doesn't support ORDER BY.")
                if expression.this.this and not distinct:
                    this = expression.this.this

            separator = expression.args.get("separator")
            return f"GROUP_CONCAT({distinct or ''}{self.format_args(this, separator)})"

        def least_sql(self, expression: exp.Least) -> str:
            if len(expression.expressions) > 1:
                return rename_func("MIN")(self, expression)

            return self.expressions(expression)

        def transaction_sql(self, expression: exp.Transaction) -> str:
            this = expression.this
            this = f" {this}" if this else ""
            return f"BEGIN{this} TRANSACTION"
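As a usage sketch (not part of the module source), the dialect is normally exercised through sqlglot's top-level API. The table and column names below are invented and the exact output can vary by sqlglot version, but it illustrates the TYPE_MAPPING entries (INT becomes INTEGER) and the no_trycast_sql fallback registered above.

import sqlglot

# Transpile a query into the SQLite dialect defined in this module.
sql = sqlglot.transpile(
    "SELECT TRY_CAST(a AS INT) FROM t LIMIT 5",
    write="sqlite",
)[0]
print(sql)  # roughly: SELECT CAST(a AS INTEGER) FROM t LIMIT 5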
class SQLite(Dialect):
class Tokenizer(tokens.Tokenizer):
    IDENTIFIERS = ['"', ("[", "]"), "`"]
    HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", ""), ("0X", "")]

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
    }
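A small, hedged illustration of the tokenizer settings above (the identifiers are made up; the exact re-quoting can vary by sqlglot version): square-bracket identifiers are accepted on input because ("[", "]") appears in IDENTIFIERS.

import sqlglot

ast = sqlglot.parse_one("SELECT [my col] FROM [my table]", read="sqlite")
# Bracketed identifiers round-trip; on output they are re-quoted,
# roughly: SELECT "my col" FROM "my table"
print(ast.sql(dialect="sqlite"))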
Inherited Members
class Parser(parser.Parser):
    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,  # type: ignore
        "EDITDIST3": exp.Levenshtein.from_arg_list,
    }
Parser consumes a list of tokens produced by the sqlglot.tokens.Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: the desired error level. Default: ErrorLevel.RAISE
- error_message_context: determines the amount of context to capture from a query string when displaying the error message (in number of characters). Default: 50.
- index_offset: Index offset for arrays, e.g. ARRAY[0] vs ARRAY[1] as the head of a list. Default: 0
- alias_post_tablesample: If the table alias comes after tablesample. Default: False
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
- null_ordering: Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
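As a brief example of the FUNCTIONS override above (the column names are illustrative): EDITDIST3 parses into a Levenshtein expression, which other dialects can then render with their own function name.

import sqlglot
from sqlglot import exp

ast = sqlglot.parse_one("SELECT EDITDIST3(a, b) FROM t", read="sqlite")
# The call is parsed as exp.Levenshtein rather than an anonymous function.
assert isinstance(ast.find(exp.Levenshtein), exp.Levenshtein)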
Inherited Members
class Generator(generator.Generator):
Generator interprets the given syntax tree and produces a SQL string as an output.
Arguments:
- time_mapping (dict): the dictionary of custom time mappings in which the key represents a python time format and the output the target time format
- time_trie (trie): a trie of the time_mapping keys
- pretty (bool): if set to True the returned string will be formatted. Default: False.
- quote_start (str): specifies which starting character to use to delimit quotes. Default: '.
- quote_end (str): specifies which ending character to use to delimit quotes. Default: '.
- identifier_start (str): specifies which starting character to use to delimit identifiers. Default: ".
- identifier_end (str): specifies which ending character to use to delimit identifiers. Default: ".
- bit_start (str): specifies which starting character to use to delimit bit literals. Default: None.
- bit_end (str): specifies which ending character to use to delimit bit literals. Default: None.
- hex_start (str): specifies which starting character to use to delimit hex literals. Default: None.
- hex_end (str): specifies which ending character to use to delimit hex literals. Default: None.
- byte_start (str): specifies which starting character to use to delimit byte literals. Default: None.
- byte_end (str): specifies which ending character to use to delimit byte literals. Default: None.
- identify (bool | str): 'always' to always quote identifiers, 'safe' to quote only identifiers that contain no uppercase characters; True defaults to 'always'.
- normalize (bool): if set to True, all identifiers will be lower cased
- string_escape (str): specifies a string escape character. Default: '.
- identifier_escape (str): specifies an identifier escape character. Default: ".
- pad (int): determines padding in a formatted string. Default: 2.
- indent (int): determines the size of indentation in a formatted string. Default: 4.
- unnest_column_only (bool): if true unnest table aliases are considered only as column aliases
- normalize_functions (str): normalize function names to "upper" or "lower", or pass None to leave them unchanged. Default: "upper"
- alias_post_tablesample (bool): if the table alias comes after tablesample. Default: False
- unsupported_level (ErrorLevel): determines the generator's behavior when it encounters unsupported expressions. Default: ErrorLevel.WARN.
- null_ordering (str): Indicates the default null ordering method to use if not explicitly set. Options are "nulls_are_small", "nulls_are_large", "nulls_are_last". Default: "nulls_are_small"
- max_unsupported (int): Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3
- leading_comma (bool): if the comma is leading or trailing in select statements. Default: False
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80
- comments: Whether or not to preserve comments in the output SQL code. Default: True
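A hedged sketch of the exp.Create transform registered in TRANSFORMS (table and column names are invented, and the output may differ slightly across sqlglot versions): a single-column, table-level PRIMARY KEY is folded into the column definition, matching SQLite's preferred form.

import sqlglot

sql = sqlglot.transpile(
    "CREATE TABLE t (id INT, PRIMARY KEY (id))",
    write="sqlite",
)[0]
print(sql)  # roughly: CREATE TABLE t (id INTEGER PRIMARY KEY)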
def datediff_sql(self, expression: exp.DateDiff) -> str:
    unit = expression.args.get("unit")
    unit = unit.name.upper() if unit else "DAY"

    sql = f"(JULIANDAY({self.sql(expression, 'this')}) - JULIANDAY({self.sql(expression, 'expression')}))"

    if unit == "MONTH":
        sql = f"{sql} / 30.0"
    elif unit == "YEAR":
        sql = f"{sql} / 365.0"
    elif unit == "HOUR":
        sql = f"{sql} * 24.0"
    elif unit == "MINUTE":
        sql = f"{sql} * 1440.0"
    elif unit == "SECOND":
        sql = f"{sql} * 86400.0"
    elif unit == "MILLISECOND":
        sql = f"{sql} * 86400000.0"
    elif unit == "MICROSECOND":
        sql = f"{sql} * 86400000000.0"
    elif unit == "NANOSECOND":
        sql = f"{sql} * 8640000000000.0"
    else:
        self.unsupported(f"DATEDIFF unsupported for '{unit}'.")

    return f"CAST({sql} AS INTEGER)"
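A minimal sketch of how this method renders a date difference (the column names and the direct use of Dialect.generate are for illustration only): the difference is expressed with JULIANDAY() and scaled by the unit.

from sqlglot import exp
from sqlglot.dialects.sqlite import SQLite

# Build a DateDiff node by hand and render it with the SQLite generator.
diff = exp.DateDiff(
    this=exp.column("end_date"),
    expression=exp.column("start_date"),
    unit=exp.var("MONTH"),
)
print(SQLite().generate(diff))
# roughly: CAST((JULIANDAY(end_date) - JULIANDAY(start_date)) / 30.0 AS INTEGER)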
def groupconcat_sql(self, expression):
def groupconcat_sql(self, expression):
    this = expression.this
    distinct = expression.find(exp.Distinct)
    if distinct:
        this = distinct.expressions[0]
        distinct = "DISTINCT "

    if isinstance(expression.this, exp.Order):
        self.unsupported("SQLite GROUP_CONCAT doesn't support ORDER BY.")
        if expression.this.this and not distinct:
            this = expression.this.this

    separator = expression.args.get("separator")
    return f"GROUP_CONCAT({distinct or ''}{self.format_args(this, separator)})"
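And a similarly hedged sketch for GROUP_CONCAT generation (the column name is invented): a DISTINCT argument is detected via exp.Distinct and emitted inside the call.

from sqlglot import exp
from sqlglot.dialects.sqlite import SQLite

agg = exp.GroupConcat(this=exp.Distinct(expressions=[exp.column("x")]))
print(SQLite().generate(agg))  # roughly: GROUP_CONCAT(DISTINCT x)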
Inherited Members
- sqlglot.generator.Generator
- Generator
- generate
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- notnullcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- create_sql
- clone_sql
- describe_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- datatypesize_sql
- datatype_sql
- directory_sql
- delete_sql
- drop_sql
- except_sql
- except_op
- fetch_sql
- filter_sql
- hint_sql
- index_sql
- identifier_sql
- inputoutputformat_sql
- national_sql
- partition_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- afterjournalproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- insert_sql
- intersect_sql
- intersect_op
- introducer_sql
- pseudotype_sql
- onconflict_sql
- returning_sql
- rowformatdelimitedproperty_sql
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- group_sql
- having_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- offset_sql
- setitem_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- loaddata_sql
- null_sql
- boolean_sql
- order_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognize_sql
- query_modifiers
- after_having_modifiers
- after_limit_modifiers
- select_sql
- schema_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- union_sql
- union_op
- unnest_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- constraint_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- concat_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- unique_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonobject_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- aliases_sql
- attimezone_sql
- add_sql
- and_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- commit_sql
- rollback_sql
- altercolumn_sql
- renametable_sql
- altertable_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- intdiv_sql
- dpipe_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- is_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- or_sql
- slice_sql
- sub_sql
- trycast_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- text_width
- format_time
- expressions
- op_expressions
- naked_property
- set_operation
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql