Coverage for /home/martinb/.local/share/virtualenvs/camcops/lib/python3.6/site-packages/alembic/autogenerate/compare.py : 12%

import contextlib
import logging
import re

from sqlalchemy import event
from sqlalchemy import inspect
from sqlalchemy import schema as sa_schema
from sqlalchemy import types as sqltypes
from sqlalchemy.util import OrderedSet

from alembic.ddl.base import _fk_spec
from .render import _user_defined_render
from .. import util
from ..operations import ops
from ..util import compat
from ..util import sqla_compat

log = logging.getLogger(__name__)
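# Entry point for autogenerate: compute the upgrade operations for the given
# MigrationScript, then derive the downgrade operations by reversing them.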
def _populate_migration_script(autogen_context, migration_script):
    upgrade_ops = migration_script.upgrade_ops_list[-1]
    downgrade_ops = migration_script.downgrade_ops_list[-1]

    _produce_net_changes(autogen_context, upgrade_ops)
    upgrade_ops.reverse_into(downgrade_ops)
comparators = util.Dispatcher(uselist=True)
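# Top-level comparison: decide which schemas to inspect, then hand off to the
# "schema"-level comparators registered on the Dispatcher above.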
def _produce_net_changes(autogen_context, upgrade_ops):

    connection = autogen_context.connection
    include_schemas = autogen_context.opts.get("include_schemas", False)

    inspector = inspect(connection)

    default_schema = connection.dialect.default_schema_name
    if include_schemas:
        schemas = set(inspector.get_schema_names())
        # filter out the system "information_schema" pseudo-schema
        schemas.discard("information_schema")
        # replace the "default" schema with None
        schemas.discard(default_schema)
        schemas.add(None)
    else:
        schemas = [None]

    comparators.dispatch("schema", autogen_context.dialect.name)(
        autogen_context, upgrade_ops, schemas
    )
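# Schema-level comparator: collects table names from the database and from the
# MetaData (excluding the alembic version table) and compares the two sets.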
@comparators.dispatch_for("schema")
def _autogen_for_tables(autogen_context, upgrade_ops, schemas):
    inspector = autogen_context.inspector

    conn_table_names = set()

    version_table_schema = (
        autogen_context.migration_context.version_table_schema
    )
    version_table = autogen_context.migration_context.version_table

    for s in schemas:
        tables = set(inspector.get_table_names(schema=s))
        if s == version_table_schema:
            tables = tables.difference(
                [autogen_context.migration_context.version_table]
            )
        conn_table_names.update(zip([s] * len(tables), tables))

    metadata_table_names = OrderedSet(
        [(table.schema, table.name) for table in autogen_context.sorted_tables]
    ).difference([(version_table_schema, version_table)])

    _compare_tables(
        conn_table_names,
        metadata_table_names,
        inspector,
        upgrade_ops,
        autogen_context,
    )
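# Core table-level diff: emits CreateTableOp / DropTableOp for tables present
# on only one side, and dispatches "table"-level comparators (indexes, uniques,
# foreign keys, comments, columns) for tables present on both sides.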
def _compare_tables(
    conn_table_names,
    metadata_table_names,
    inspector,
    upgrade_ops,
    autogen_context,
):

    default_schema = inspector.bind.dialect.default_schema_name

    # tables coming from the connection will not have "schema"
    # set if it matches default_schema_name; so we need a list
    # of table names from local metadata that also have "None" if schema
    # == default_schema_name.  Most setups will be like this anyway but
    # some are not (see #170)
    metadata_table_names_no_dflt_schema = OrderedSet(
        [
            (schema if schema != default_schema else None, tname)
            for schema, tname in metadata_table_names
        ]
    )

    # to adjust for the MetaData collection storing the tables either
    # as "schemaname.tablename" or just "tablename", create a new lookup
    # which will match the "non-default-schema" keys to the Table object.
    tname_to_table = dict(
        (
            no_dflt_schema,
            autogen_context.table_key_to_table[
                sa_schema._get_table_key(tname, schema)
            ],
        )
        for no_dflt_schema, (schema, tname) in zip(
            metadata_table_names_no_dflt_schema, metadata_table_names
        )
    )
    metadata_table_names = metadata_table_names_no_dflt_schema

    for s, tname in metadata_table_names.difference(conn_table_names):
        name = "%s.%s" % (s, tname) if s else tname
        metadata_table = tname_to_table[(s, tname)]
        if autogen_context.run_filters(
            metadata_table, tname, "table", False, None
        ):
            upgrade_ops.ops.append(
                ops.CreateTableOp.from_table(metadata_table)
            )
            log.info("Detected added table %r", name)
            modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)

            comparators.dispatch("table")(
                autogen_context,
                modify_table_ops,
                s,
                tname,
                None,
                metadata_table,
            )
            if not modify_table_ops.is_empty():
                upgrade_ops.ops.append(modify_table_ops)

    removal_metadata = sa_schema.MetaData()
    for s, tname in conn_table_names.difference(metadata_table_names):
        name = sa_schema._get_table_key(tname, s)
        exists = name in removal_metadata.tables
        t = sa_schema.Table(tname, removal_metadata, schema=s)

        if not exists:
            event.listen(
                t,
                "column_reflect",
                # fmt: off
                autogen_context.migration_context.impl.
                _compat_autogen_column_reflect(inspector),
                # fmt: on
            )
            inspector.reflecttable(t, None)
        if autogen_context.run_filters(t, tname, "table", True, None):

            modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)

            comparators.dispatch("table")(
                autogen_context, modify_table_ops, s, tname, t, None
            )
            if not modify_table_ops.is_empty():
                upgrade_ops.ops.append(modify_table_ops)

            upgrade_ops.ops.append(ops.DropTableOp.from_table(t))
            log.info("Detected removed table %r", name)

    existing_tables = conn_table_names.intersection(metadata_table_names)

    existing_metadata = sa_schema.MetaData()
    conn_column_info = {}
    for s, tname in existing_tables:
        name = sa_schema._get_table_key(tname, s)
        exists = name in existing_metadata.tables
        t = sa_schema.Table(tname, existing_metadata, schema=s)
        if not exists:
            event.listen(
                t,
                "column_reflect",
                # fmt: off
                autogen_context.migration_context.impl.
                _compat_autogen_column_reflect(inspector),
                # fmt: on
            )
            inspector.reflecttable(t, None)
        conn_column_info[(s, tname)] = t

    for s, tname in sorted(existing_tables, key=lambda x: (x[0] or "", x[1])):
        s = s or None
        name = "%s.%s" % (s, tname) if s else tname
        metadata_table = tname_to_table[(s, tname)]
        conn_table = existing_metadata.tables[name]

        if autogen_context.run_filters(
            metadata_table, tname, "table", False, conn_table
        ):

            modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
            with _compare_columns(
                s,
                tname,
                conn_table,
                metadata_table,
                modify_table_ops,
                autogen_context,
                inspector,
            ):

                comparators.dispatch("table")(
                    autogen_context,
                    modify_table_ops,
                    s,
                    tname,
                    conn_table,
                    metadata_table,
                )

            if not modify_table_ops.is_empty():
                upgrade_ops.ops.append(modify_table_ops)
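# The _make_* helpers below convert the plain dictionaries returned by the
# SQLAlchemy Inspector (e.g. roughly {"name": "ix_x", "column_names": ["x"],
# "unique": False} for an index) into real schema objects attached to the
# reflected Table, so they can be compared against the metadata side.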
def _make_index(params, conn_table):
    ix = sa_schema.Index(
        params["name"],
        *[conn_table.c[cname] for cname in params["column_names"]],
        unique=params["unique"]
    )
    if "duplicates_constraint" in params:
        ix.info["duplicates_constraint"] = params["duplicates_constraint"]
    return ix
def _make_unique_constraint(params, conn_table):
    uq = sa_schema.UniqueConstraint(
        *[conn_table.c[cname] for cname in params["column_names"]],
        name=params["name"]
    )
    if "duplicates_index" in params:
        uq.info["duplicates_index"] = params["duplicates_index"]

    return uq
def _make_foreign_key(params, conn_table):
    tname = params["referred_table"]
    if params["referred_schema"]:
        tname = "%s.%s" % (params["referred_schema"], tname)

    options = params.get("options", {})

    const = sa_schema.ForeignKeyConstraint(
        [conn_table.c[cname] for cname in params["constrained_columns"]],
        ["%s.%s" % (tname, n) for n in params["referred_columns"]],
        onupdate=options.get("onupdate"),
        ondelete=options.get("ondelete"),
        deferrable=options.get("deferrable"),
        initially=options.get("initially"),
        name=params["name"],
    )
    # needed by 0.7
    conn_table.append_constraint(const)
    return const
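# Column-level diff, used as a context manager: adds and alters columns before
# yielding, then drops removed columns after the other table-level comparators
# have run inside the "with" block.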
@contextlib.contextmanager
def _compare_columns(
    schema,
    tname,
    conn_table,
    metadata_table,
    modify_table_ops,
    autogen_context,
    inspector,
):
    name = "%s.%s" % (schema, tname) if schema else tname
    metadata_cols_by_name = dict(
        (c.name, c) for c in metadata_table.c if not c.system
    )
    conn_col_names = dict((c.name, c) for c in conn_table.c)
    metadata_col_names = OrderedSet(sorted(metadata_cols_by_name))

    for cname in metadata_col_names.difference(conn_col_names):
        if autogen_context.run_filters(
            metadata_cols_by_name[cname], cname, "column", False, None
        ):
            modify_table_ops.ops.append(
                ops.AddColumnOp.from_column_and_tablename(
                    schema, tname, metadata_cols_by_name[cname]
                )
            )
            log.info("Detected added column '%s.%s'", name, cname)

    for colname in metadata_col_names.intersection(conn_col_names):
        metadata_col = metadata_cols_by_name[colname]
        conn_col = conn_table.c[colname]
        if not autogen_context.run_filters(
            metadata_col, colname, "column", False, conn_col
        ):
            continue
        alter_column_op = ops.AlterColumnOp(tname, colname, schema=schema)

        comparators.dispatch("column")(
            autogen_context,
            alter_column_op,
            schema,
            tname,
            colname,
            conn_col,
            metadata_col,
        )

        if alter_column_op.has_changes():
            modify_table_ops.ops.append(alter_column_op)

    yield

    for cname in set(conn_col_names).difference(metadata_col_names):
        if autogen_context.run_filters(
            conn_table.c[cname], cname, "column", True, None
        ):
            modify_table_ops.ops.append(
                ops.DropColumnOp.from_column_and_tablename(
                    schema, tname, conn_table.c[cname]
                )
            )
            log.info("Detected removed column '%s.%s'", name, cname)
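# Lightweight "signature" wrappers giving Index, UniqueConstraint and
# ForeignKeyConstraint a common facade (name, column signature, equality)
# for comparison purposes.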
class _constraint_sig(object):
    def md_name_to_sql_name(self, context):
        return sqla_compat._get_constraint_final_name(
            self.const, context.dialect
        )

    def __eq__(self, other):
        return self.const == other.const

    def __ne__(self, other):
        return self.const != other.const

    def __hash__(self):
        return hash(self.const)
class _uq_constraint_sig(_constraint_sig):
    is_index = False
    is_unique = True

    def __init__(self, const):
        self.const = const
        self.name = const.name
        self.sig = tuple(sorted([col.name for col in const.columns]))

    @property
    def column_names(self):
        return [col.name for col in self.const.columns]
class _ix_constraint_sig(_constraint_sig):
    is_index = True

    def __init__(self, const):
        self.const = const
        self.name = const.name
        self.sig = tuple(sorted([col.name for col in const.columns]))
        self.is_unique = bool(const.unique)

    def md_name_to_sql_name(self, context):
        return sqla_compat._get_constraint_final_name(
            self.const, context.dialect
        )

    @property
    def column_names(self):
        return sqla_compat._get_index_column_names(self.const)
class _fk_constraint_sig(_constraint_sig):
    def __init__(self, const, include_options=False):
        self.const = const
        self.name = const.name

        (
            self.source_schema,
            self.source_table,
            self.source_columns,
            self.target_schema,
            self.target_table,
            self.target_columns,
            onupdate,
            ondelete,
            deferrable,
            initially,
        ) = _fk_spec(const)

        self.sig = (
            self.source_schema,
            self.source_table,
            tuple(self.source_columns),
            self.target_schema,
            self.target_table,
            tuple(self.target_columns),
        )
        if include_options:
            self.sig += (
                (None if onupdate.lower() == "no action" else onupdate.lower())
                if onupdate
                else None,
                (None if ondelete.lower() == "no action" else ondelete.lower())
                if ondelete
                else None,
                # convert initially + deferrable into one three-state value
                "initially_deferrable"
                if initially and initially.lower() == "deferred"
                else "deferrable"
                if deferrable
                else "not deferrable",
            )
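# Table-level comparator for indexes and unique constraints; the numbered
# comments below walk through the steps of the comparison.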
@comparators.dispatch_for("table")
def _compare_indexes_and_uniques(
    autogen_context, modify_ops, schema, tname, conn_table, metadata_table
):

    inspector = autogen_context.inspector
    is_create_table = conn_table is None
    is_drop_table = metadata_table is None

    # 1a. get raw indexes and unique constraints from metadata ...
    if metadata_table is not None:
        metadata_unique_constraints = set(
            uq
            for uq in metadata_table.constraints
            if isinstance(uq, sa_schema.UniqueConstraint)
        )
        metadata_indexes = set(metadata_table.indexes)
    else:
        metadata_unique_constraints = set()
        metadata_indexes = set()

    conn_uniques = conn_indexes = frozenset()

    supports_unique_constraints = False

    unique_constraints_duplicate_unique_indexes = False

    if conn_table is not None:
        # 1b. ... and from connection, if the table exists
        if hasattr(inspector, "get_unique_constraints"):
            try:
                conn_uniques = inspector.get_unique_constraints(
                    tname, schema=schema
                )
                supports_unique_constraints = True
            except NotImplementedError:
                pass
            except TypeError:
                # number of arguments is off for the base
                # method in SQLAlchemy due to the cache decorator
                # not being present
                pass
            else:
                for uq in conn_uniques:
                    if uq.get("duplicates_index"):
                        unique_constraints_duplicate_unique_indexes = True
        try:
            conn_indexes = inspector.get_indexes(tname, schema=schema)
        except NotImplementedError:
            pass

        # 2. convert conn-level objects from raw inspector records
        # into schema objects
        if is_drop_table:
            # for DROP TABLE uniques are inline, don't need them
            conn_uniques = set()
        else:
            conn_uniques = set(
                _make_unique_constraint(uq_def, conn_table)
                for uq_def in conn_uniques
            )

        conn_indexes = set(_make_index(ix, conn_table) for ix in conn_indexes)
    # 2a. if the dialect dupes unique indexes as unique constraints
    # (mysql and oracle), correct for that

    if unique_constraints_duplicate_unique_indexes:
        _correct_for_uq_duplicates_uix(
            conn_uniques,
            conn_indexes,
            metadata_unique_constraints,
            metadata_indexes,
            autogen_context.dialect,
        )

    # 3. give the dialect a chance to omit indexes and constraints that
    # we know are either added implicitly by the DB or that the DB
    # can't accurately report on
    autogen_context.migration_context.impl.correct_for_autogen_constraints(
        conn_uniques,
        conn_indexes,
        metadata_unique_constraints,
        metadata_indexes,
    )

    # 4. organize the constraints into "signature" collections, the
    # _constraint_sig() objects provide a consistent facade over both
    # Index and UniqueConstraint so we can easily work with them
    # interchangeably
    metadata_unique_constraints = set(
        _uq_constraint_sig(uq) for uq in metadata_unique_constraints
    )

    metadata_indexes = set(_ix_constraint_sig(ix) for ix in metadata_indexes)

    conn_unique_constraints = set(
        _uq_constraint_sig(uq) for uq in conn_uniques
    )

    conn_indexes = set(_ix_constraint_sig(ix) for ix in conn_indexes)

    # 5. index things by name, for those objects that have names
    metadata_names = dict(
        (c.md_name_to_sql_name(autogen_context), c)
        for c in metadata_unique_constraints.union(metadata_indexes)
        if isinstance(c, _ix_constraint_sig)
        or sqla_compat._constraint_is_named(c.const, autogen_context.dialect)
    )

    conn_uniques_by_name = dict((c.name, c) for c in conn_unique_constraints)
    conn_indexes_by_name = dict((c.name, c) for c in conn_indexes)
    conn_names = dict(
        (c.name, c)
        for c in conn_unique_constraints.union(conn_indexes)
        if c.name is not None
    )

    doubled_constraints = dict(
        (name, (conn_uniques_by_name[name], conn_indexes_by_name[name]))
        for name in set(conn_uniques_by_name).intersection(
            conn_indexes_by_name
        )
    )

    # 6. index things by "column signature", to help with unnamed unique
    # constraints.
    conn_uniques_by_sig = dict((uq.sig, uq) for uq in conn_unique_constraints)
    metadata_uniques_by_sig = dict(
        (uq.sig, uq) for uq in metadata_unique_constraints
    )
    metadata_indexes_by_sig = dict((ix.sig, ix) for ix in metadata_indexes)
    unnamed_metadata_uniques = dict(
        (uq.sig, uq)
        for uq in metadata_unique_constraints
        if not sqla_compat._constraint_is_named(
            uq.const, autogen_context.dialect
        )
    )
    # assumptions:
    # 1. a unique constraint or an index from the connection *always*
    #    has a name.
    # 2. an index on the metadata side *always* has a name.
    # 3. a unique constraint on the metadata side *might* have a name.
    # 4. The backend may double up indexes as unique constraints and
    #    vice versa (e.g. MySQL, Postgresql)

    def obj_added(obj):
        if obj.is_index:
            if autogen_context.run_filters(
                obj.const, obj.name, "index", False, None
            ):
                modify_ops.ops.append(ops.CreateIndexOp.from_index(obj.const))
                log.info(
                    "Detected added index '%s' on %s",
                    obj.name,
                    ", ".join(["'%s'" % obj.column_names]),
                )
        else:
            if not supports_unique_constraints:
                # can't report unique indexes as added if we don't
                # detect them
                return
            if is_create_table or is_drop_table:
                # unique constraints are created inline with table defs
                return
            if autogen_context.run_filters(
                obj.const, obj.name, "unique_constraint", False, None
            ):
                modify_ops.ops.append(
                    ops.AddConstraintOp.from_constraint(obj.const)
                )
                log.info(
                    "Detected added unique constraint '%s' on %s",
                    obj.name,
                    ", ".join(["'%s'" % obj.column_names]),
                )

    def obj_removed(obj):
        if obj.is_index:
            if obj.is_unique and not supports_unique_constraints:
                # many databases double up unique constraints
                # as unique indexes.  without that list we can't
                # be sure what we're doing here
                return

            if autogen_context.run_filters(
                obj.const, obj.name, "index", True, None
            ):
                modify_ops.ops.append(ops.DropIndexOp.from_index(obj.const))
                log.info(
                    "Detected removed index '%s' on '%s'", obj.name, tname
                )
        else:
            if is_create_table or is_drop_table:
                # if the whole table is being dropped, we don't need to
                # consider unique constraint separately
                return
            if autogen_context.run_filters(
                obj.const, obj.name, "unique_constraint", True, None
            ):
                modify_ops.ops.append(
                    ops.DropConstraintOp.from_constraint(obj.const)
                )
                log.info(
                    "Detected removed unique constraint '%s' on '%s'",
                    obj.name,
                    tname,
                )

    def obj_changed(old, new, msg):
        if old.is_index:
            if autogen_context.run_filters(
                new.const, new.name, "index", False, old.const
            ):
                log.info(
                    "Detected changed index '%s' on '%s':%s",
                    old.name,
                    tname,
                    ", ".join(msg),
                )
                modify_ops.ops.append(ops.DropIndexOp.from_index(old.const))
                modify_ops.ops.append(ops.CreateIndexOp.from_index(new.const))
        else:
            if autogen_context.run_filters(
                new.const, new.name, "unique_constraint", False, old.const
            ):
                log.info(
                    "Detected changed unique constraint '%s' on '%s':%s",
                    old.name,
                    tname,
                    ", ".join(msg),
                )
                modify_ops.ops.append(
                    ops.DropConstraintOp.from_constraint(old.const)
                )
                modify_ops.ops.append(
                    ops.AddConstraintOp.from_constraint(new.const)
                )
    for added_name in sorted(set(metadata_names).difference(conn_names)):
        obj = metadata_names[added_name]
        obj_added(obj)

    for existing_name in sorted(set(metadata_names).intersection(conn_names)):
        metadata_obj = metadata_names[existing_name]

        if existing_name in doubled_constraints:
            conn_uq, conn_idx = doubled_constraints[existing_name]
            if metadata_obj.is_index:
                conn_obj = conn_idx
            else:
                conn_obj = conn_uq
        else:
            conn_obj = conn_names[existing_name]

        if conn_obj.is_index != metadata_obj.is_index:
            obj_removed(conn_obj)
            obj_added(metadata_obj)
        else:
            msg = []
            if conn_obj.is_unique != metadata_obj.is_unique:
                msg.append(
                    " unique=%r to unique=%r"
                    % (conn_obj.is_unique, metadata_obj.is_unique)
                )
            if conn_obj.sig != metadata_obj.sig:
                msg.append(
                    " columns %r to %r" % (conn_obj.sig, metadata_obj.sig)
                )

            if msg:
                obj_changed(conn_obj, metadata_obj, msg)

    for removed_name in sorted(set(conn_names).difference(metadata_names)):
        conn_obj = conn_names[removed_name]
        if not conn_obj.is_index and conn_obj.sig in unnamed_metadata_uniques:
            continue
        elif removed_name in doubled_constraints:
            if (
                conn_obj.sig not in metadata_indexes_by_sig
                and conn_obj.sig not in metadata_uniques_by_sig
            ):
                conn_uq, conn_idx = doubled_constraints[removed_name]
                obj_removed(conn_uq)
                obj_removed(conn_idx)
        else:
            obj_removed(conn_obj)

    for uq_sig in unnamed_metadata_uniques:
        if uq_sig not in conn_uniques_by_sig:
            obj_added(unnamed_metadata_uniques[uq_sig])
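# Helper for step 2a above: drop one half of each unique-constraint /
# unique-index pair reported twice by the backend, preferring whichever form
# the metadata already declares.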
def _correct_for_uq_duplicates_uix(
    conn_unique_constraints,
    conn_indexes,
    metadata_unique_constraints,
    metadata_indexes,
    dialect,
):
    # dedupe unique indexes vs. constraints, since MySQL / Oracle
    # don't really have unique constraints as a separate construct.
    # but look in the metadata and try to maintain constructs
    # that already seem to be defined one way or the other
    # on that side.  This logic was formerly local to the MySQL dialect,
    # generalized to Oracle and others.  See #276

    # resolve final rendered name for unique constraints defined in the
    # metadata.  this includes truncation of long names.  naming convention
    # names currently should already be set as cons.name, however leave this
    # to the sqla_compat to decide.
    metadata_cons_names = [
        (sqla_compat._get_constraint_final_name(cons, dialect), cons)
        for cons in metadata_unique_constraints
    ]

    metadata_uq_names = set(
        name for name, cons in metadata_cons_names if name is not None
    )

    unnamed_metadata_uqs = set(
        [
            _uq_constraint_sig(cons).sig
            for name, cons in metadata_cons_names
            if name is None
        ]
    )

    metadata_ix_names = set(
        [
            sqla_compat._get_constraint_final_name(cons, dialect)
            for cons in metadata_indexes
            if cons.unique
        ]
    )

    # for reflection side, names are in their final database form
    # already since they're from the database
    conn_ix_names = dict(
        (cons.name, cons) for cons in conn_indexes if cons.unique
    )

    uqs_dupe_indexes = dict(
        (cons.name, cons)
        for cons in conn_unique_constraints
        if cons.info["duplicates_index"]
    )

    for overlap in uqs_dupe_indexes:
        if overlap not in metadata_uq_names:
            if (
                _uq_constraint_sig(uqs_dupe_indexes[overlap]).sig
                not in unnamed_metadata_uqs
            ):

                conn_unique_constraints.discard(uqs_dupe_indexes[overlap])
        elif overlap not in metadata_ix_names:
            conn_indexes.discard(conn_ix_names[overlap])
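# Column-level comparator: NULL / NOT NULL changes.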
@comparators.dispatch_for("column")
def _compare_nullable(
    autogen_context,
    alter_column_op,
    schema,
    tname,
    cname,
    conn_col,
    metadata_col,
):

    # work around SQLAlchemy issue #3023
    if metadata_col.primary_key:
        return

    metadata_col_nullable = metadata_col.nullable
    conn_col_nullable = conn_col.nullable
    alter_column_op.existing_nullable = conn_col_nullable

    if conn_col_nullable is not metadata_col_nullable:
        alter_column_op.modify_nullable = metadata_col_nullable
        log.info(
            "Detected %s on column '%s.%s'",
            "NULL" if metadata_col_nullable else "NOT NULL",
            tname,
            cname,
        )
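# Column-level comparator: carry the metadata-side autoincrement flag onto the
# op so that backends that need it (such as MySQL) can include it when
# altering the column.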
@comparators.dispatch_for("column")
def _setup_autoincrement(
    autogen_context,
    alter_column_op,
    schema,
    tname,
    cname,
    conn_col,
    metadata_col,
):

    if metadata_col.table._autoincrement_column is metadata_col:
        alter_column_op.kw["autoincrement"] = True
    elif metadata_col.autoincrement is True:
        alter_column_op.kw["autoincrement"] = True
    elif metadata_col.autoincrement is False:
        alter_column_op.kw["autoincrement"] = False
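# Column-level comparator: type changes, delegated to the migration context's
# type comparison hook.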
@comparators.dispatch_for("column")
def _compare_type(
    autogen_context,
    alter_column_op,
    schema,
    tname,
    cname,
    conn_col,
    metadata_col,
):

    conn_type = conn_col.type
    alter_column_op.existing_type = conn_type
    metadata_type = metadata_col.type
    if conn_type._type_affinity is sqltypes.NullType:
        log.info(
            "Couldn't determine database type for column '%s.%s'",
            tname,
            cname,
        )
        return
    if metadata_type._type_affinity is sqltypes.NullType:
        log.info(
            "Column '%s.%s' has no type within the model; can't compare",
            tname,
            cname,
        )
        return

    isdiff = autogen_context.migration_context._compare_type(
        conn_col, metadata_col
    )

    if isdiff:
        alter_column_op.modify_type = metadata_type
        log.info(
            "Detected type change from %r to %r on '%s.%s'",
            conn_type,
            metadata_type,
            tname,
            cname,
        )
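# Render the metadata-side server default as a string for textual comparison
# against the reflected default.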
def _render_server_default_for_compare(
    metadata_default, metadata_col, autogen_context
):
    rendered = _user_defined_render(
        "server_default", metadata_default, autogen_context
    )
    if rendered is not False:
        return rendered

    if isinstance(metadata_default, sa_schema.DefaultClause):
        if isinstance(metadata_default.arg, compat.string_types):
            metadata_default = metadata_default.arg
        else:
            metadata_default = str(
                metadata_default.arg.compile(
                    dialect=autogen_context.dialect,
                    compile_kwargs={"literal_binds": True},
                )
            )
    if isinstance(metadata_default, compat.string_types):
        if metadata_col.type._type_affinity is sqltypes.String:
            metadata_default = re.sub(r"^'|'$", "", metadata_default)
            return repr(metadata_default)
        else:
            return metadata_default
    else:
        return None
def _normalize_computed_default(sqltext):
    """We want to warn if a computed SQL expression has changed; however,
    we don't want false positives and the warning is not that critical,
    so filter out most forms of variability from the SQL text.

    """

    return re.sub(r"[ \(\)'\"`\[\]]", "", sqltext).lower()
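# Compare Computed (GENERATED) defaults textually; a difference only produces
# a warning, since the DDL to alter them is not known.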
def _compare_computed_default(
    autogen_context,
    alter_column_op,
    schema,
    tname,
    cname,
    conn_col,
    metadata_col,
):
    rendered_metadata_default = str(
        metadata_col.server_default.sqltext.compile(
            dialect=autogen_context.dialect,
            compile_kwargs={"literal_binds": True},
        )
    )

    # since we cannot change computed columns, we do only a crude comparison
    # here where we try to eliminate syntactical differences in order to
    # get a minimal comparison just to emit a warning.

    rendered_metadata_default = _normalize_computed_default(
        rendered_metadata_default
    )

    if isinstance(conn_col.server_default, sa_schema.Computed):
        rendered_conn_default = str(
            conn_col.server_default.sqltext.compile(
                dialect=autogen_context.dialect,
                compile_kwargs={"literal_binds": True},
            )
        )
        if rendered_conn_default is None:
            rendered_conn_default = ""
        else:
            rendered_conn_default = _normalize_computed_default(
                rendered_conn_default
            )
    else:
        rendered_conn_default = ""

    if rendered_metadata_default != rendered_conn_default:
        _warn_computed_not_supported(tname, cname)
def _warn_computed_not_supported(tname, cname):
    util.warn("Computed default on %s.%s cannot be modified" % (tname, cname))
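# Column-level comparator: server defaults, including special handling for
# Computed columns depending on SQLAlchemy's reflection support.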
@comparators.dispatch_for("column")
def _compare_server_default(
    autogen_context,
    alter_column_op,
    schema,
    tname,
    cname,
    conn_col,
    metadata_col,
):

    metadata_default = metadata_col.server_default
    conn_col_default = conn_col.server_default
    if conn_col_default is None and metadata_default is None:
        return False

    if sqla_compat.has_computed and isinstance(
        metadata_default, sa_schema.Computed
    ):
        # return False in case of a computed column as the server
        # default.  Note that DDL for adding or removing "GENERATED AS" from
        # an existing column is not currently known for any backend.
        # Once SQLAlchemy can reflect "GENERATED" as the "computed" element,
        # we would also want to ignore and/or warn for changes vs. the
        # metadata (or support backend specific DDL if applicable).
        if not sqla_compat.has_computed_reflection:
            return False

        else:
            return _compare_computed_default(
                autogen_context,
                alter_column_op,
                schema,
                tname,
                cname,
                conn_col,
                metadata_col,
            )
    rendered_metadata_default = _render_server_default_for_compare(
        metadata_default, metadata_col, autogen_context
    )

    if sqla_compat.has_computed_reflection and isinstance(
        conn_col.server_default, sa_schema.Computed
    ):
        _warn_computed_not_supported(tname, cname)
        return False
    else:
        rendered_conn_default = (
            conn_col.server_default.arg.text
            if conn_col.server_default
            else None
        )

    alter_column_op.existing_server_default = conn_col_default

    isdiff = autogen_context.migration_context._compare_server_default(
        conn_col,
        metadata_col,
        rendered_metadata_default,
        rendered_conn_default,
    )
    if isdiff:
        alter_column_op.modify_server_default = metadata_default
        log.info("Detected server default on column '%s.%s'", tname, cname)
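# Column-level comparator: column comments, on dialects that support them.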
@comparators.dispatch_for("column")
def _compare_column_comment(
    autogen_context,
    alter_column_op,
    schema,
    tname,
    cname,
    conn_col,
    metadata_col,
):

    if not sqla_compat._dialect_supports_comments(autogen_context.dialect):
        return

    metadata_comment = metadata_col.comment
    conn_col_comment = conn_col.comment
    if conn_col_comment is None and metadata_comment is None:
        return False

    alter_column_op.existing_comment = conn_col_comment

    if conn_col_comment != metadata_comment:
        alter_column_op.modify_comment = metadata_comment
        log.info("Detected column comment '%s.%s'", tname, cname)
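# Table-level comparator: foreign key constraints, matched by their column
# signature rather than by name (SQLite does not preserve constraint names).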
@comparators.dispatch_for("table")
def _compare_foreign_keys(
    autogen_context,
    modify_table_ops,
    schema,
    tname,
    conn_table,
    metadata_table,
):

    # if we're doing CREATE TABLE, all FKs are created
    # inline within the table def
    if conn_table is None or metadata_table is None:
        return

    inspector = autogen_context.inspector
    metadata_fks = set(
        fk
        for fk in metadata_table.constraints
        if isinstance(fk, sa_schema.ForeignKeyConstraint)
    )

    conn_fks = inspector.get_foreign_keys(tname, schema=schema)

    backend_reflects_fk_options = conn_fks and "options" in conn_fks[0]

    conn_fks = set(_make_foreign_key(const, conn_table) for const in conn_fks)

    # give the dialect a chance to correct the FKs to match more
    # closely
    autogen_context.migration_context.impl.correct_for_autogen_foreignkeys(
        conn_fks, metadata_fks
    )

    metadata_fks = set(
        _fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
        for fk in metadata_fks
    )

    conn_fks = set(
        _fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
        for fk in conn_fks
    )

    conn_fks_by_sig = dict((c.sig, c) for c in conn_fks)
    metadata_fks_by_sig = dict((c.sig, c) for c in metadata_fks)

    metadata_fks_by_name = dict(
        (c.name, c) for c in metadata_fks if c.name is not None
    )
    conn_fks_by_name = dict(
        (c.name, c) for c in conn_fks if c.name is not None
    )
    def _add_fk(obj, compare_to):
        if autogen_context.run_filters(
            obj.const, obj.name, "foreign_key_constraint", False, compare_to
        ):
            modify_table_ops.ops.append(
                ops.CreateForeignKeyOp.from_constraint(obj.const)
            )

            log.info(
                "Detected added foreign key (%s)(%s) on table %s%s",
                ", ".join(obj.source_columns),
                ", ".join(obj.target_columns),
                "%s." % obj.source_schema if obj.source_schema else "",
                obj.source_table,
            )
    def _remove_fk(obj, compare_to):
        if autogen_context.run_filters(
            obj.const, obj.name, "foreign_key_constraint", True, compare_to
        ):
            modify_table_ops.ops.append(
                ops.DropConstraintOp.from_constraint(obj.const)
            )
            log.info(
                "Detected removed foreign key (%s)(%s) on table %s%s",
                ", ".join(obj.source_columns),
                ", ".join(obj.target_columns),
                "%s." % obj.source_schema if obj.source_schema else "",
                obj.source_table,
            )

    # so far it appears we don't need to do this by name at all.
    # SQLite doesn't preserve constraint names anyway

    for removed_sig in set(conn_fks_by_sig).difference(metadata_fks_by_sig):
        const = conn_fks_by_sig[removed_sig]
        if removed_sig not in metadata_fks_by_sig:
            compare_to = (
                metadata_fks_by_name[const.name].const
                if const.name in metadata_fks_by_name
                else None
            )
            _remove_fk(const, compare_to)

    for added_sig in set(metadata_fks_by_sig).difference(conn_fks_by_sig):
        const = metadata_fks_by_sig[added_sig]
        if added_sig not in conn_fks_by_sig:
            compare_to = (
                conn_fks_by_name[const.name].const
                if const.name in conn_fks_by_name
                else None
            )
            _add_fk(const, compare_to)
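# Table-level comparator: table comments, on dialects that support them.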
@comparators.dispatch_for("table")
def _compare_table_comment(
    autogen_context,
    modify_table_ops,
    schema,
    tname,
    conn_table,
    metadata_table,
):

    if not sqla_compat._dialect_supports_comments(autogen_context.dialect):
        return

    # if we're doing CREATE TABLE, comments will be created inline
    # with the create_table op.
    if conn_table is None or metadata_table is None:
        return

    if conn_table.comment is None and metadata_table.comment is None:
        return

    if metadata_table.comment is None and conn_table.comment is not None:
        modify_table_ops.ops.append(
            ops.DropTableCommentOp(
                tname, existing_comment=conn_table.comment, schema=schema
            )
        )
    elif metadata_table.comment != conn_table.comment:
        modify_table_ops.ops.append(
            ops.CreateTableCommentOp(
                tname,
                metadata_table.comment,
                existing_comment=conn_table.comment,
                schema=schema,
            )
        )