update to python fastapi
15 Backend/venv/lib/python3.12/site-packages/alembic/operations/__init__.py Normal file
@@ -0,0 +1,15 @@
from . import toimpl
from .base import AbstractOperations
from .base import BatchOperations
from .base import Operations
from .ops import MigrateOperation
from .ops import MigrationScript


__all__ = [
    "AbstractOperations",
    "Operations",
    "BatchOperations",
    "MigrateOperation",
    "MigrationScript",
]
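
Note: migration scripts normally reach these exports through the op proxy rather than importing them directly. A minimal sketch of that usage (hypothetical revision identifiers and table name, not part of this commit):

from alembic import op
import sqlalchemy as sa

# revision identifiers, made up for illustration
revision = "000000000001"
down_revision = None


def upgrade():
    # op proxies to the Operations class exported above
    op.add_column("accounts", sa.Column("nickname", sa.String(50)))


def downgrade():
    op.drop_column("accounts", "nickname")
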
Binary file not shown. (6 files)
1837 Backend/venv/lib/python3.12/site-packages/alembic/operations/base.py Normal file
File diff suppressed because it is too large
718 Backend/venv/lib/python3.12/site-packages/alembic/operations/batch.py Normal file
@@ -0,0 +1,718 @@
from __future__ import annotations

from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union

from sqlalchemy import CheckConstraint
from sqlalchemy import Column
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy import Index
from sqlalchemy import MetaData
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import schema as sql_schema
from sqlalchemy import Table
from sqlalchemy import types as sqltypes
from sqlalchemy.events import SchemaEventTarget
from sqlalchemy.util import OrderedDict
from sqlalchemy.util import topological

from ..util import exc
from ..util.sqla_compat import _columns_for_constraint
from ..util.sqla_compat import _copy
from ..util.sqla_compat import _copy_expression
from ..util.sqla_compat import _ensure_scope_for_ddl
from ..util.sqla_compat import _fk_is_self_referential
from ..util.sqla_compat import _idx_table_bound_expressions
from ..util.sqla_compat import _insert_inline
from ..util.sqla_compat import _is_type_bound
from ..util.sqla_compat import _remove_column_from_collection
from ..util.sqla_compat import _resolve_for_variant
from ..util.sqla_compat import _select
from ..util.sqla_compat import constraint_name_defined
from ..util.sqla_compat import constraint_name_string

if TYPE_CHECKING:
    from typing import Literal

    from sqlalchemy.engine import Dialect
    from sqlalchemy.sql.elements import ColumnClause
    from sqlalchemy.sql.elements import quoted_name
    from sqlalchemy.sql.functions import Function
    from sqlalchemy.sql.schema import Constraint
    from sqlalchemy.sql.type_api import TypeEngine

    from ..ddl.impl import DefaultImpl


class BatchOperationsImpl:
    def __init__(
        self,
        operations,
        table_name,
        schema,
        recreate,
        copy_from,
        table_args,
        table_kwargs,
        reflect_args,
        reflect_kwargs,
        naming_convention,
        partial_reordering,
    ):
        self.operations = operations
        self.table_name = table_name
        self.schema = schema
        if recreate not in ("auto", "always", "never"):
            raise ValueError(
                "recreate may be one of 'auto', 'always', or 'never'."
            )
        self.recreate = recreate
        self.copy_from = copy_from
        self.table_args = table_args
        self.table_kwargs = dict(table_kwargs)
        self.reflect_args = reflect_args
        self.reflect_kwargs = dict(reflect_kwargs)
        self.reflect_kwargs.setdefault(
            "listeners", list(self.reflect_kwargs.get("listeners", ()))
        )
        self.reflect_kwargs["listeners"].append(
            ("column_reflect", operations.impl.autogen_column_reflect)
        )
        self.naming_convention = naming_convention
        self.partial_reordering = partial_reordering
        self.batch = []

    @property
    def dialect(self) -> Dialect:
        return self.operations.impl.dialect

    @property
    def impl(self) -> DefaultImpl:
        return self.operations.impl

    def _should_recreate(self) -> bool:
        if self.recreate == "auto":
            return self.operations.impl.requires_recreate_in_batch(self)
        elif self.recreate == "always":
            return True
        else:
            return False

    def flush(self) -> None:
        should_recreate = self._should_recreate()

        with _ensure_scope_for_ddl(self.impl.connection):
            if not should_recreate:
                for opname, arg, kw in self.batch:
                    fn = getattr(self.operations.impl, opname)
                    fn(*arg, **kw)
            else:
                if self.naming_convention:
                    m1 = MetaData(naming_convention=self.naming_convention)
                else:
                    m1 = MetaData()

                if self.copy_from is not None:
                    existing_table = self.copy_from
                    reflected = False
                else:
                    if self.operations.migration_context.as_sql:
                        raise exc.CommandError(
                            f"This operation cannot proceed in --sql mode; "
                            f"batch mode with dialect "
                            f"{self.operations.migration_context.dialect.name} "  # noqa: E501
                            f"requires a live database connection with which "
                            f'to reflect the table "{self.table_name}". '
                            f"To generate a batch SQL migration script using "
                            "table "
                            '"move and copy", a complete Table object '
                            f'should be passed to the "copy_from" argument '
                            "of the batch_alter_table() method so that table "
                            "reflection can be skipped."
                        )

                    existing_table = Table(
                        self.table_name,
                        m1,
                        schema=self.schema,
                        autoload_with=self.operations.get_bind(),
                        *self.reflect_args,
                        **self.reflect_kwargs,
                    )
                    reflected = True

                batch_impl = ApplyBatchImpl(
                    self.impl,
                    existing_table,
                    self.table_args,
                    self.table_kwargs,
                    reflected,
                    partial_reordering=self.partial_reordering,
                )
                for opname, arg, kw in self.batch:
                    fn = getattr(batch_impl, opname)
                    fn(*arg, **kw)

                batch_impl._create(self.impl)

    def alter_column(self, *arg, **kw) -> None:
        self.batch.append(("alter_column", arg, kw))

    def add_column(self, *arg, **kw) -> None:
        if (
            "insert_before" in kw or "insert_after" in kw
        ) and not self._should_recreate():
            raise exc.CommandError(
                "Can't specify insert_before or insert_after when using "
                "ALTER; please specify recreate='always'"
            )
        self.batch.append(("add_column", arg, kw))

    def drop_column(self, *arg, **kw) -> None:
        self.batch.append(("drop_column", arg, kw))

    def add_constraint(self, const: Constraint) -> None:
        self.batch.append(("add_constraint", (const,), {}))

    def drop_constraint(self, const: Constraint) -> None:
        self.batch.append(("drop_constraint", (const,), {}))

    def rename_table(self, *arg, **kw):
        self.batch.append(("rename_table", arg, kw))

    def create_index(self, idx: Index, **kw: Any) -> None:
        self.batch.append(("create_index", (idx,), kw))

    def drop_index(self, idx: Index, **kw: Any) -> None:
        self.batch.append(("drop_index", (idx,), kw))

    def create_table_comment(self, table):
        self.batch.append(("create_table_comment", (table,), {}))

    def drop_table_comment(self, table):
        self.batch.append(("drop_table_comment", (table,), {}))

    def create_table(self, table):
        raise NotImplementedError("Can't create table in batch mode")

    def drop_table(self, table):
        raise NotImplementedError("Can't drop table in batch mode")

    def create_column_comment(self, column):
        self.batch.append(("create_column_comment", (column,), {}))


class ApplyBatchImpl:
    def __init__(
        self,
        impl: DefaultImpl,
        table: Table,
        table_args: tuple,
        table_kwargs: Dict[str, Any],
        reflected: bool,
        partial_reordering: tuple = (),
    ) -> None:
        self.impl = impl
        self.table = table  # this is a Table object
        self.table_args = table_args
        self.table_kwargs = table_kwargs
        self.temp_table_name = self._calc_temp_name(table.name)
        self.new_table: Optional[Table] = None

        self.partial_reordering = partial_reordering  # tuple of tuples
        self.add_col_ordering: Tuple[
            Tuple[str, str], ...
        ] = ()  # tuple of tuples

        self.column_transfers = OrderedDict(
            (c.name, {"expr": c}) for c in self.table.c
        )
        self.existing_ordering = list(self.column_transfers)

        self.reflected = reflected
        self._grab_table_elements()

    @classmethod
    def _calc_temp_name(cls, tablename: Union[quoted_name, str]) -> str:
        return ("_alembic_tmp_%s" % tablename)[0:50]

    def _grab_table_elements(self) -> None:
        schema = self.table.schema
        self.columns: Dict[str, Column[Any]] = OrderedDict()
        for c in self.table.c:
            c_copy = _copy(c, schema=schema)
            c_copy.unique = c_copy.index = False
            # ensure that the type object was copied,
            # as we may need to modify it in-place
            if isinstance(c.type, SchemaEventTarget):
                assert c_copy.type is not c.type
            self.columns[c.name] = c_copy
        self.named_constraints: Dict[str, Constraint] = {}
        self.unnamed_constraints = []
        self.col_named_constraints = {}
        self.indexes: Dict[str, Index] = {}
        self.new_indexes: Dict[str, Index] = {}

        for const in self.table.constraints:
            if _is_type_bound(const):
                continue
            elif (
                self.reflected
                and isinstance(const, CheckConstraint)
                and not const.name
            ):
                # TODO: we are skipping unnamed reflected CheckConstraint
                # because
                # we have no way to determine _is_type_bound() for these.
                pass
            elif constraint_name_string(const.name):
                self.named_constraints[const.name] = const
            else:
                self.unnamed_constraints.append(const)

        if not self.reflected:
            for col in self.table.c:
                for const in col.constraints:
                    if const.name:
                        self.col_named_constraints[const.name] = (col, const)

        for idx in self.table.indexes:
            self.indexes[idx.name] = idx  # type: ignore[index]

        for k in self.table.kwargs:
            self.table_kwargs.setdefault(k, self.table.kwargs[k])

    def _adjust_self_columns_for_partial_reordering(self) -> None:
        pairs = set()

        col_by_idx = list(self.columns)

        if self.partial_reordering:
            for tuple_ in self.partial_reordering:
                for index, elem in enumerate(tuple_):
                    if index > 0:
                        pairs.add((tuple_[index - 1], elem))
        else:
            for index, elem in enumerate(self.existing_ordering):
                if index > 0:
                    pairs.add((col_by_idx[index - 1], elem))

        pairs.update(self.add_col_ordering)

        # this can happen if some columns were dropped and not removed
        # from existing_ordering. this should be prevented already, but
        # conservatively making sure this didn't happen
        pairs_list = [p for p in pairs if p[0] != p[1]]

        sorted_ = list(
            topological.sort(pairs_list, col_by_idx, deterministic_order=True)
        )
        self.columns = OrderedDict((k, self.columns[k]) for k in sorted_)
        self.column_transfers = OrderedDict(
            (k, self.column_transfers[k]) for k in sorted_
        )

    def _transfer_elements_to_new_table(self) -> None:
        assert self.new_table is None, "Can only create new table once"

        m = MetaData()
        schema = self.table.schema

        if self.partial_reordering or self.add_col_ordering:
            self._adjust_self_columns_for_partial_reordering()

        self.new_table = new_table = Table(
            self.temp_table_name,
            m,
            *(list(self.columns.values()) + list(self.table_args)),
            schema=schema,
            **self.table_kwargs,
        )

        for const in (
            list(self.named_constraints.values()) + self.unnamed_constraints
        ):
            const_columns = {c.key for c in _columns_for_constraint(const)}

            if not const_columns.issubset(self.column_transfers):
                continue

            const_copy: Constraint
            if isinstance(const, ForeignKeyConstraint):
                if _fk_is_self_referential(const):
                    # for self-referential constraint, refer to the
                    # *original* table name, and not _alembic_batch_temp.
                    # This is consistent with how we're handling
                    # FK constraints from other tables; we assume SQLite
                    # no foreign keys just keeps the names unchanged, so
                    # when we rename back, they match again.
                    const_copy = _copy(
                        const, schema=schema, target_table=self.table
                    )
                else:
                    # "target_table" for ForeignKeyConstraint.copy() is
                    # only used if the FK is detected as being
                    # self-referential, which we are handling above.
                    const_copy = _copy(const, schema=schema)
            else:
                const_copy = _copy(
                    const, schema=schema, target_table=new_table
                )
            if isinstance(const, ForeignKeyConstraint):
                self._setup_referent(m, const)
            new_table.append_constraint(const_copy)

    def _gather_indexes_from_both_tables(self) -> List[Index]:
        assert self.new_table is not None
        idx: List[Index] = []

        for idx_existing in self.indexes.values():
            # this is a lift-and-move from Table.to_metadata

            if idx_existing._column_flag:  # type: ignore
                continue

            idx_copy = Index(
                idx_existing.name,
                unique=idx_existing.unique,
                *[
                    _copy_expression(expr, self.new_table)
                    for expr in _idx_table_bound_expressions(idx_existing)
                ],
                _table=self.new_table,
                **idx_existing.kwargs,
            )
            idx.append(idx_copy)

        for index in self.new_indexes.values():
            idx.append(
                Index(
                    index.name,
                    unique=index.unique,
                    *[self.new_table.c[col] for col in index.columns.keys()],
                    **index.kwargs,
                )
            )
        return idx

    def _setup_referent(
        self, metadata: MetaData, constraint: ForeignKeyConstraint
    ) -> None:
        spec = constraint.elements[
            0
        ]._get_colspec()  # type:ignore[attr-defined]
        parts = spec.split(".")
        tname = parts[-2]
        if len(parts) == 3:
            referent_schema = parts[0]
        else:
            referent_schema = None

        if tname != self.temp_table_name:
            key = sql_schema._get_table_key(tname, referent_schema)

            def colspec(elem: Any):
                return elem._get_colspec()

            if key in metadata.tables:
                t = metadata.tables[key]
                for elem in constraint.elements:
                    colname = colspec(elem).split(".")[-1]
                    if colname not in t.c:
                        t.append_column(Column(colname, sqltypes.NULLTYPE))
            else:
                Table(
                    tname,
                    metadata,
                    *[
                        Column(n, sqltypes.NULLTYPE)
                        for n in [
                            colspec(elem).split(".")[-1]
                            for elem in constraint.elements
                        ]
                    ],
                    schema=referent_schema,
                )

    def _create(self, op_impl: DefaultImpl) -> None:
        self._transfer_elements_to_new_table()

        op_impl.prep_table_for_batch(self, self.table)
        assert self.new_table is not None
        op_impl.create_table(self.new_table)

        try:
            op_impl._exec(
                _insert_inline(self.new_table).from_select(
                    list(
                        k
                        for k, transfer in self.column_transfers.items()
                        if "expr" in transfer
                    ),
                    _select(
                        *[
                            transfer["expr"]
                            for transfer in self.column_transfers.values()
                            if "expr" in transfer
                        ]
                    ),
                )
            )
            op_impl.drop_table(self.table)
        except:
            op_impl.drop_table(self.new_table)
            raise
        else:
            op_impl.rename_table(
                self.temp_table_name, self.table.name, schema=self.table.schema
            )
            self.new_table.name = self.table.name
            try:
                for idx in self._gather_indexes_from_both_tables():
                    op_impl.create_index(idx)
            finally:
                self.new_table.name = self.temp_table_name

    def alter_column(
        self,
        table_name: str,
        column_name: str,
        nullable: Optional[bool] = None,
        server_default: Optional[Union[Function[Any], str, bool]] = False,
        name: Optional[str] = None,
        type_: Optional[TypeEngine] = None,
        autoincrement: Optional[Union[bool, Literal["auto"]]] = None,
        comment: Union[str, Literal[False]] = False,
        **kw,
    ) -> None:
        existing = self.columns[column_name]
        existing_transfer: Dict[str, Any] = self.column_transfers[column_name]
        if name is not None and name != column_name:
            # note that we don't change '.key' - we keep referring
            # to the renamed column by its old key in _create(). neat!
            existing.name = name
            existing_transfer["name"] = name

        existing_type = kw.get("existing_type", None)
        if existing_type:
            resolved_existing_type = _resolve_for_variant(
                kw["existing_type"], self.impl.dialect
            )

            # pop named constraints for Boolean/Enum for rename
            if (
                isinstance(resolved_existing_type, SchemaEventTarget)
                and resolved_existing_type.name  # type:ignore[attr-defined] # noqa E501
            ):
                self.named_constraints.pop(
                    resolved_existing_type.name,  # type:ignore[attr-defined] # noqa E501
                    None,
                )

        if type_ is not None:
            type_ = sqltypes.to_instance(type_)
            # old type is being discarded so turn off eventing
            # rules. Alternatively we can
            # erase the events set up by this type, but this is simpler.
            # we also ignore the drop_constraint that will come here from
            # Operations.implementation_for(alter_column)

            if isinstance(existing.type, SchemaEventTarget):
                existing.type._create_events = (  # type:ignore[attr-defined]
                    existing.type.create_constraint  # type:ignore[attr-defined] # noqa
                ) = False

            self.impl.cast_for_batch_migrate(
                existing, existing_transfer, type_
            )

            existing.type = type_

            # we *dont* however set events for the new type, because
            # alter_column is invoked from
            # Operations.implementation_for(alter_column) which already
            # will emit an add_constraint()

        if nullable is not None:
            existing.nullable = nullable
        if server_default is not False:
            if server_default is None:
                existing.server_default = None
            else:
                sql_schema.DefaultClause(
                    server_default  # type: ignore[arg-type]
                )._set_parent(  # type:ignore[attr-defined]
                    existing
                )
        if autoincrement is not None:
            existing.autoincrement = bool(autoincrement)

        if comment is not False:
            existing.comment = comment

    def _setup_dependencies_for_add_column(
        self,
        colname: str,
        insert_before: Optional[str],
        insert_after: Optional[str],
    ) -> None:
        index_cols = self.existing_ordering
        col_indexes = {name: i for i, name in enumerate(index_cols)}

        if not self.partial_reordering:
            if insert_after:
                if not insert_before:
                    if insert_after in col_indexes:
                        # insert after an existing column
                        idx = col_indexes[insert_after] + 1
                        if idx < len(index_cols):
                            insert_before = index_cols[idx]
                    else:
                        # insert after a column that is also new
                        insert_before = dict(self.add_col_ordering)[
                            insert_after
                        ]
            if insert_before:
                if not insert_after:
                    if insert_before in col_indexes:
                        # insert before an existing column
                        idx = col_indexes[insert_before] - 1
                        if idx >= 0:
                            insert_after = index_cols[idx]
                    else:
                        # insert before a column that is also new
                        insert_after = {
                            b: a for a, b in self.add_col_ordering
                        }[insert_before]

        if insert_before:
            self.add_col_ordering += ((colname, insert_before),)
        if insert_after:
            self.add_col_ordering += ((insert_after, colname),)

        if (
            not self.partial_reordering
            and not insert_before
            and not insert_after
            and col_indexes
        ):
            self.add_col_ordering += ((index_cols[-1], colname),)

    def add_column(
        self,
        table_name: str,
        column: Column[Any],
        insert_before: Optional[str] = None,
        insert_after: Optional[str] = None,
        **kw,
    ) -> None:
        self._setup_dependencies_for_add_column(
            column.name, insert_before, insert_after
        )
        # we copy the column because operations.add_column()
        # gives us a Column that is part of a Table already.
        self.columns[column.name] = _copy(column, schema=self.table.schema)
        self.column_transfers[column.name] = {}

    def drop_column(
        self,
        table_name: str,
        column: Union[ColumnClause[Any], Column[Any]],
        **kw,
    ) -> None:
        if column.name in self.table.primary_key.columns:
            _remove_column_from_collection(
                self.table.primary_key.columns, column
            )
        del self.columns[column.name]
        del self.column_transfers[column.name]
        self.existing_ordering.remove(column.name)

        # pop named constraints for Boolean/Enum for rename
        if (
            "existing_type" in kw
            and isinstance(kw["existing_type"], SchemaEventTarget)
            and kw["existing_type"].name  # type:ignore[attr-defined]
        ):
            self.named_constraints.pop(
                kw["existing_type"].name, None  # type:ignore[attr-defined]
            )

    def create_column_comment(self, column):
        """the batch table creation function will issue create_column_comment
        on the real "impl" as part of the create table process.

        That is, the Column object will have the comment on it already,
        so when it is received by add_column() it will be a normal part of
        the CREATE TABLE and doesn't need an extra step here.

        """

    def create_table_comment(self, table):
        """the batch table creation function will issue create_table_comment
        on the real "impl" as part of the create table process.

        """

    def drop_table_comment(self, table):
        """the batch table creation function will issue drop_table_comment
        on the real "impl" as part of the create table process.

        """

    def add_constraint(self, const: Constraint) -> None:
        if not constraint_name_defined(const.name):
            raise ValueError("Constraint must have a name")
        if isinstance(const, sql_schema.PrimaryKeyConstraint):
            if self.table.primary_key in self.unnamed_constraints:
                self.unnamed_constraints.remove(self.table.primary_key)

        if constraint_name_string(const.name):
            self.named_constraints[const.name] = const
        else:
            self.unnamed_constraints.append(const)

    def drop_constraint(self, const: Constraint) -> None:
        if not const.name:
            raise ValueError("Constraint must have a name")
        try:
            if const.name in self.col_named_constraints:
                col, const = self.col_named_constraints.pop(const.name)

                for col_const in list(self.columns[col.name].constraints):
                    if col_const.name == const.name:
                        self.columns[col.name].constraints.remove(col_const)
            elif constraint_name_string(const.name):
                const = self.named_constraints.pop(const.name)
            elif const in self.unnamed_constraints:
                self.unnamed_constraints.remove(const)

        except KeyError:
            if _is_type_bound(const):
                # type-bound constraints are only included in the new
                # table via their type object in any case, so ignore the
                # drop_constraint() that comes here via the
                # Operations.implementation_for(alter_column)
                return
            raise ValueError("No such constraint: '%s'" % const.name)
        else:
            if isinstance(const, PrimaryKeyConstraint):
                for col in const.columns:
                    self.columns[col.name].primary_key = False

    def create_index(self, idx: Index) -> None:
        self.new_indexes[idx.name] = idx  # type: ignore[index]

    def drop_index(self, idx: Index) -> None:
        try:
            del self.indexes[idx.name]  # type: ignore[arg-type]
        except KeyError:
            raise ValueError("No such index: '%s'" % idx.name)

    def rename_table(self, *arg, **kw):
        raise NotImplementedError("TODO")
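
Note on how the batch classes above are driven: BatchOperationsImpl queues each directive as an ("opname", args, kwargs) tuple in self.batch, and flush() either replays them as plain ALTER statements or hands them to ApplyBatchImpl for the "move and copy" recreate. A rough sketch of the migration-script side (hypothetical table and column names):

import sqlalchemy as sa
from alembic import op


def upgrade():
    # each call in the block appends to BatchOperationsImpl.batch;
    # flush() runs when the context manager exits
    with op.batch_alter_table("accounts", recreate="auto") as batch_op:
        batch_op.add_column(sa.Column("email", sa.String(100)))
        batch_op.alter_column("name", new_column_name="full_name")
        # directives that ALTER cannot express on the target dialect
        # force the ApplyBatchImpl move-and-copy path
        batch_op.drop_column("legacy_flag")
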
2764 Backend/venv/lib/python3.12/site-packages/alembic/operations/ops.py Normal file
File diff suppressed because it is too large
287 Backend/venv/lib/python3.12/site-packages/alembic/operations/schemaobj.py Normal file
@@ -0,0 +1,287 @@
from __future__ import annotations

from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union

from sqlalchemy import schema as sa_schema
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.schema import Constraint
from sqlalchemy.sql.schema import Index
from sqlalchemy.types import Integer
from sqlalchemy.types import NULLTYPE

from .. import util
from ..util import sqla_compat

if TYPE_CHECKING:
    from sqlalchemy.sql.elements import ColumnElement
    from sqlalchemy.sql.elements import TextClause
    from sqlalchemy.sql.schema import CheckConstraint
    from sqlalchemy.sql.schema import ForeignKey
    from sqlalchemy.sql.schema import ForeignKeyConstraint
    from sqlalchemy.sql.schema import MetaData
    from sqlalchemy.sql.schema import PrimaryKeyConstraint
    from sqlalchemy.sql.schema import Table
    from sqlalchemy.sql.schema import UniqueConstraint
    from sqlalchemy.sql.type_api import TypeEngine

    from ..runtime.migration import MigrationContext


class SchemaObjects:
    def __init__(
        self, migration_context: Optional[MigrationContext] = None
    ) -> None:
        self.migration_context = migration_context

    def primary_key_constraint(
        self,
        name: Optional[sqla_compat._ConstraintNameDefined],
        table_name: str,
        cols: Sequence[str],
        schema: Optional[str] = None,
        **dialect_kw,
    ) -> PrimaryKeyConstraint:
        m = self.metadata()
        columns = [sa_schema.Column(n, NULLTYPE) for n in cols]
        t = sa_schema.Table(table_name, m, *columns, schema=schema)
        # SQLAlchemy primary key constraint name arg is wrongly typed on
        # the SQLAlchemy side through 2.0.5 at least
        p = sa_schema.PrimaryKeyConstraint(
            *[t.c[n] for n in cols], name=name, **dialect_kw  # type: ignore
        )
        return p

    def foreign_key_constraint(
        self,
        name: Optional[sqla_compat._ConstraintNameDefined],
        source: str,
        referent: str,
        local_cols: List[str],
        remote_cols: List[str],
        onupdate: Optional[str] = None,
        ondelete: Optional[str] = None,
        deferrable: Optional[bool] = None,
        source_schema: Optional[str] = None,
        referent_schema: Optional[str] = None,
        initially: Optional[str] = None,
        match: Optional[str] = None,
        **dialect_kw,
    ) -> ForeignKeyConstraint:
        m = self.metadata()
        if source == referent and source_schema == referent_schema:
            t1_cols = local_cols + remote_cols
        else:
            t1_cols = local_cols
            sa_schema.Table(
                referent,
                m,
                *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
                schema=referent_schema,
            )

        t1 = sa_schema.Table(
            source,
            m,
            *[
                sa_schema.Column(n, NULLTYPE)
                for n in util.unique_list(t1_cols)
            ],
            schema=source_schema,
        )

        tname = (
            "%s.%s" % (referent_schema, referent)
            if referent_schema
            else referent
        )

        dialect_kw["match"] = match

        f = sa_schema.ForeignKeyConstraint(
            local_cols,
            ["%s.%s" % (tname, n) for n in remote_cols],
            name=name,
            onupdate=onupdate,
            ondelete=ondelete,
            deferrable=deferrable,
            initially=initially,
            **dialect_kw,
        )
        t1.append_constraint(f)

        return f

    def unique_constraint(
        self,
        name: Optional[sqla_compat._ConstraintNameDefined],
        source: str,
        local_cols: Sequence[str],
        schema: Optional[str] = None,
        **kw,
    ) -> UniqueConstraint:
        t = sa_schema.Table(
            source,
            self.metadata(),
            *[sa_schema.Column(n, NULLTYPE) for n in local_cols],
            schema=schema,
        )
        kw["name"] = name
        uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
        # TODO: need event tests to ensure the event
        # is fired off here
        t.append_constraint(uq)
        return uq

    def check_constraint(
        self,
        name: Optional[sqla_compat._ConstraintNameDefined],
        source: str,
        condition: Union[str, TextClause, ColumnElement[Any]],
        schema: Optional[str] = None,
        **kw,
    ) -> Union[CheckConstraint]:
        t = sa_schema.Table(
            source,
            self.metadata(),
            sa_schema.Column("x", Integer),
            schema=schema,
        )
        ck = sa_schema.CheckConstraint(condition, name=name, **kw)
        t.append_constraint(ck)
        return ck

    def generic_constraint(
        self,
        name: Optional[sqla_compat._ConstraintNameDefined],
        table_name: str,
        type_: Optional[str],
        schema: Optional[str] = None,
        **kw,
    ) -> Any:
        t = self.table(table_name, schema=schema)
        types: Dict[Optional[str], Any] = {
            "foreignkey": lambda name: sa_schema.ForeignKeyConstraint(
                [], [], name=name
            ),
            "primary": sa_schema.PrimaryKeyConstraint,
            "unique": sa_schema.UniqueConstraint,
            "check": lambda name: sa_schema.CheckConstraint("", name=name),
            None: sa_schema.Constraint,
        }
        try:
            const = types[type_]
        except KeyError as ke:
            raise TypeError(
                "'type' can be one of %s"
                % ", ".join(sorted(repr(x) for x in types))
            ) from ke
        else:
            const = const(name=name)
            t.append_constraint(const)
            return const

    def metadata(self) -> MetaData:
        kw = {}
        if (
            self.migration_context is not None
            and "target_metadata" in self.migration_context.opts
        ):
            mt = self.migration_context.opts["target_metadata"]
            if hasattr(mt, "naming_convention"):
                kw["naming_convention"] = mt.naming_convention
        return sa_schema.MetaData(**kw)

    def table(self, name: str, *columns, **kw) -> Table:
        m = self.metadata()

        cols = [
            sqla_compat._copy(c) if c.table is not None else c
            for c in columns
            if isinstance(c, Column)
        ]
        # these flags have already added their UniqueConstraint /
        # Index objects to the table, so flip them off here.
        # SQLAlchemy tometadata() avoids this instead by preserving the
        # flags and skipping the constraints that have _type_bound on them,
        # but for a migration we'd rather list out the constraints
        # explicitly.
        _constraints_included = kw.pop("_constraints_included", False)
        if _constraints_included:
            for c in cols:
                c.unique = c.index = False

        t = sa_schema.Table(name, m, *cols, **kw)

        constraints = [
            sqla_compat._copy(elem, target_table=t)
            if getattr(elem, "parent", None) is not t
            and getattr(elem, "parent", None) is not None
            else elem
            for elem in columns
            if isinstance(elem, (Constraint, Index))
        ]

        for const in constraints:
            t.append_constraint(const)

        for f in t.foreign_keys:
            self._ensure_table_for_fk(m, f)
        return t

    def column(self, name: str, type_: TypeEngine, **kw) -> Column:
        return sa_schema.Column(name, type_, **kw)

    def index(
        self,
        name: Optional[str],
        tablename: Optional[str],
        columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
        schema: Optional[str] = None,
        **kw,
    ) -> Index:
        t = sa_schema.Table(
            tablename or "no_table",
            self.metadata(),
            schema=schema,
        )
        kw["_table"] = t
        idx = sa_schema.Index(
            name,
            *[util.sqla_compat._textual_index_column(t, n) for n in columns],
            **kw,
        )
        return idx

    def _parse_table_key(self, table_key: str) -> Tuple[Optional[str], str]:
        if "." in table_key:
            tokens = table_key.split(".")
            sname: Optional[str] = ".".join(tokens[0:-1])
            tname = tokens[-1]
        else:
            tname = table_key
            sname = None
        return (sname, tname)

    def _ensure_table_for_fk(self, metadata: MetaData, fk: ForeignKey) -> None:
        """create a placeholder Table object for the referent of a
        ForeignKey.

        """
        if isinstance(fk._colspec, str):  # type:ignore[attr-defined]
            table_key, cname = fk._colspec.rsplit(  # type:ignore[attr-defined]
                ".", 1
            )
            sname, tname = self._parse_table_key(table_key)
            if table_key not in metadata.tables:
                rel_t = sa_schema.Table(tname, metadata, schema=sname)
            else:
                rel_t = metadata.tables[table_key]
            if cname not in rel_t.c:
                rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
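
Note: every factory in SchemaObjects builds a throwaway MetaData/Table so the returned constraint is bound to real (placeholder) columns; only the names matter when DDL is later rendered. A rough sketch, assuming the module is importable as alembic.operations.schemaobj and using hypothetical names:

from alembic.operations.schemaobj import SchemaObjects

# no migration context, so no naming convention is applied
schema_obj = SchemaObjects(migration_context=None)
uq = schema_obj.unique_constraint(
    "uq_accounts_email",  # constraint name (hypothetical)
    "accounts",           # source table name (hypothetical)
    ["email"],            # local column names
)
# the constraint is attached to a placeholder Table of NULLTYPE columns
print(uq.name, uq.table.name)  # uq_accounts_email accounts
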
223 Backend/venv/lib/python3.12/site-packages/alembic/operations/toimpl.py Normal file
@@ -0,0 +1,223 @@
from typing import TYPE_CHECKING

from sqlalchemy import schema as sa_schema

from . import ops
from .base import Operations
from ..util.sqla_compat import _copy
from ..util.sqla_compat import sqla_2

if TYPE_CHECKING:
    from sqlalchemy.sql.schema import Table


@Operations.implementation_for(ops.AlterColumnOp)
def alter_column(
    operations: "Operations", operation: "ops.AlterColumnOp"
) -> None:
    compiler = operations.impl.dialect.statement_compiler(
        operations.impl.dialect, None
    )

    existing_type = operation.existing_type
    existing_nullable = operation.existing_nullable
    existing_server_default = operation.existing_server_default
    type_ = operation.modify_type
    column_name = operation.column_name
    table_name = operation.table_name
    schema = operation.schema
    server_default = operation.modify_server_default
    new_column_name = operation.modify_name
    nullable = operation.modify_nullable
    comment = operation.modify_comment
    existing_comment = operation.existing_comment

    def _count_constraint(constraint):
        return not isinstance(constraint, sa_schema.PrimaryKeyConstraint) and (
            not constraint._create_rule or constraint._create_rule(compiler)
        )

    if existing_type and type_:
        t = operations.schema_obj.table(
            table_name,
            sa_schema.Column(column_name, existing_type),
            schema=schema,
        )
        for constraint in t.constraints:
            if _count_constraint(constraint):
                operations.impl.drop_constraint(constraint)

    operations.impl.alter_column(
        table_name,
        column_name,
        nullable=nullable,
        server_default=server_default,
        name=new_column_name,
        type_=type_,
        schema=schema,
        existing_type=existing_type,
        existing_server_default=existing_server_default,
        existing_nullable=existing_nullable,
        comment=comment,
        existing_comment=existing_comment,
        **operation.kw,
    )

    if type_:
        t = operations.schema_obj.table(
            table_name,
            operations.schema_obj.column(column_name, type_),
            schema=schema,
        )
        for constraint in t.constraints:
            if _count_constraint(constraint):
                operations.impl.add_constraint(constraint)


@Operations.implementation_for(ops.DropTableOp)
def drop_table(operations: "Operations", operation: "ops.DropTableOp") -> None:
    operations.impl.drop_table(
        operation.to_table(operations.migration_context)
    )


@Operations.implementation_for(ops.DropColumnOp)
def drop_column(
    operations: "Operations", operation: "ops.DropColumnOp"
) -> None:
    column = operation.to_column(operations.migration_context)
    operations.impl.drop_column(
        operation.table_name, column, schema=operation.schema, **operation.kw
    )


@Operations.implementation_for(ops.CreateIndexOp)
def create_index(
    operations: "Operations", operation: "ops.CreateIndexOp"
) -> None:
    idx = operation.to_index(operations.migration_context)
    kw = {}
    if operation.if_not_exists is not None:
        if not sqla_2:
            raise NotImplementedError("SQLAlchemy 2.0+ required")

        kw["if_not_exists"] = operation.if_not_exists
    operations.impl.create_index(idx, **kw)


@Operations.implementation_for(ops.DropIndexOp)
def drop_index(operations: "Operations", operation: "ops.DropIndexOp") -> None:
    kw = {}
    if operation.if_exists is not None:
        if not sqla_2:
            raise NotImplementedError("SQLAlchemy 2.0+ required")

        kw["if_exists"] = operation.if_exists

    operations.impl.drop_index(
        operation.to_index(operations.migration_context),
        **kw,
    )


@Operations.implementation_for(ops.CreateTableOp)
def create_table(
    operations: "Operations", operation: "ops.CreateTableOp"
) -> "Table":
    table = operation.to_table(operations.migration_context)
    operations.impl.create_table(table)
    return table


@Operations.implementation_for(ops.RenameTableOp)
def rename_table(
    operations: "Operations", operation: "ops.RenameTableOp"
) -> None:
    operations.impl.rename_table(
        operation.table_name, operation.new_table_name, schema=operation.schema
    )


@Operations.implementation_for(ops.CreateTableCommentOp)
def create_table_comment(
    operations: "Operations", operation: "ops.CreateTableCommentOp"
) -> None:
    table = operation.to_table(operations.migration_context)
    operations.impl.create_table_comment(table)


@Operations.implementation_for(ops.DropTableCommentOp)
def drop_table_comment(
    operations: "Operations", operation: "ops.DropTableCommentOp"
) -> None:
    table = operation.to_table(operations.migration_context)
    operations.impl.drop_table_comment(table)


@Operations.implementation_for(ops.AddColumnOp)
def add_column(operations: "Operations", operation: "ops.AddColumnOp") -> None:
    table_name = operation.table_name
    column = operation.column
    schema = operation.schema
    kw = operation.kw

    if column.table is not None:
        column = _copy(column)

    t = operations.schema_obj.table(table_name, column, schema=schema)
    operations.impl.add_column(table_name, column, schema=schema, **kw)

    for constraint in t.constraints:
        if not isinstance(constraint, sa_schema.PrimaryKeyConstraint):
            operations.impl.add_constraint(constraint)
    for index in t.indexes:
        operations.impl.create_index(index)

    with_comment = (
        operations.impl.dialect.supports_comments
        and not operations.impl.dialect.inline_comments
    )
    comment = column.comment
    if comment and with_comment:
        operations.impl.create_column_comment(column)


@Operations.implementation_for(ops.AddConstraintOp)
def create_constraint(
    operations: "Operations", operation: "ops.AddConstraintOp"
) -> None:
    operations.impl.add_constraint(
        operation.to_constraint(operations.migration_context)
    )


@Operations.implementation_for(ops.DropConstraintOp)
def drop_constraint(
    operations: "Operations", operation: "ops.DropConstraintOp"
) -> None:
    operations.impl.drop_constraint(
        operations.schema_obj.generic_constraint(
            operation.constraint_name,
            operation.table_name,
            operation.constraint_type,
            schema=operation.schema,
        )
    )


@Operations.implementation_for(ops.BulkInsertOp)
def bulk_insert(
    operations: "Operations", operation: "ops.BulkInsertOp"
) -> None:
    operations.impl.bulk_insert(  # type: ignore[union-attr]
        operation.table, operation.rows, multiinsert=operation.multiinsert
    )


@Operations.implementation_for(ops.ExecuteSQLOp)
def execute_sql(
    operations: "Operations", operation: "ops.ExecuteSQLOp"
) -> None:
    operations.migration_context.impl.execute(
        operation.sqltext, execution_options=operation.execution_options
    )
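
Note: Operations.implementation_for registers each function above as the handler for its op class; calling a directive on op builds the corresponding ops.*Op object and dispatches it here. A rough sketch of the round trip (hypothetical index and table names; per the guard above, if_exists requires SQLAlchemy 2.0+):

from alembic import op


def upgrade():
    # builds an ops.DropIndexOp, which the registry routes to the
    # drop_index() handler above, which in turn forwards to
    # operations.impl.drop_index with if_exists set
    op.drop_index("ix_accounts_email", table_name="accounts", if_exists=True)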