Skip to content

Commit

Permalink
chore(dao): Add explicit ON DELETE CASCADE for ownership (#24628)
Browse files Browse the repository at this point in the history
  • Loading branch information
john-bodley authored and michael-s-molina committed Jul 12, 2023
1 parent 71eff95 commit 96cf640
Show file tree
Hide file tree
Showing 19 changed files with 200 additions and 120 deletions.
1 change: 1 addition & 0 deletions UPDATING.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ assists people when migrating to a new version.

## 3.0.0

- [24628](https://github.com/apache/superset/pull/24628): Augments the foreign key constraints for the `dashboard_user`, `report_schedule_user`, and `slice_user` tables to include an explicit CASCADE ON DELETE to ensure the relevant ownership records are deleted when the referenced user, dashboard, chart, or report schedule is deleted. Scheduled downtime may be advised.
- [24488](https://github.com/apache/superset/pull/24488): Augments the foreign key constraints for the `sql_metrics`, `sqlatable_user`, and `table_columns` tables which reference the `tables` table to include an explicit CASCADE ON DELETE to ensure the relevant records are deleted when a dataset is deleted. Scheduled downtime may be advised.
- [24335](https://github.com/apache/superset/pull/24335): Removed deprecated API `/superset/filter/<datasource_type>/<int:datasource_id>/<column>/`
- [24232](https://github.com/apache/superset/pull/24232): Enables ENABLE_TEMPLATE_REMOVE_FILTERS, DRILL_TO_DETAIL, DASHBOARD_CROSS_FILTERS by default, marks VERSIONED_EXPORT and ENABLE_TEMPLATE_REMOVE_FILTERS as deprecated.
Expand Down
4 changes: 0 additions & 4 deletions superset/charts/commands/delete.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,6 @@ def run(self) -> None:
self._model = cast(Slice, self._model)
try:
Dashboard.clear_cache_for_slice(slice_id=self._model_id)
# Even though SQLAlchemy should in theory delete rows from the association
# table, sporadically Superset will error because the rows are not deleted.
# Let's do it manually here to prevent the error.
self._model.owners = []
ChartDAO.delete(self._model)
except DAODeleteFailedError as ex:
logger.exception(ex.exception)
Expand Down
1 change: 0 additions & 1 deletion superset/daos/chart.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@ def delete(cls, items: Slice | list[Slice], commit: bool = True) -> None:
item_ids = [item.id for item in get_iterable(items)]
# bulk delete, first delete related data
for item in get_iterable(items):
item.owners = []
item.dashboards = []
db.session.merge(item)
# bulk delete itself
Expand Down
1 change: 0 additions & 1 deletion superset/daos/dashboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,6 @@ def delete(cls, items: Dashboard | list[Dashboard], commit: bool = True) -> None
# bulk delete, first delete related data
for item in get_iterable(items):
item.slices = []
item.owners = []
item.embedded = []
db.session.merge(item)
# bulk delete itself
Expand Down
26 changes: 1 addition & 25 deletions superset/daos/report.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
ReportScheduleType,
ReportState,
)
from superset.utils.core import get_iterable, get_user_id
from superset.utils.core import get_user_id

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -95,30 +95,6 @@ def find_by_database_ids(database_ids: list[int]) -> list[ReportSchedule]:
.all()
)

@classmethod
def delete(
    cls,
    items: ReportSchedule | list[ReportSchedule],
    commit: bool = True,
) -> None:
    """
    Delete one or more report schedules, clearing their owners first.

    The ``owners`` secondary (association) rows are emptied explicitly before
    the parent rows are deleted, rather than relying on SQLAlchemy cascades.

    :param items: A single ``ReportSchedule`` or a list of them to delete
    :param commit: If True, commit the session after deleting; callers that
        batch several operations can pass False and commit themselves
    :raises DAODeleteFailedError: If any underlying SQLAlchemy operation fails;
        the session is rolled back before re-raising
    """
    item_ids = [item.id for item in get_iterable(items)]
    try:
        # Clean owners secondary table
        report_schedules = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.id.in_(item_ids))
            .all()
        )
        # First pass: detach all owners so the association rows are deleted.
        for report_schedule in report_schedules:
            report_schedule.owners = []
        # Second pass: delete the report schedules themselves.
        for report_schedule in report_schedules:
            db.session.delete(report_schedule)
        if commit:
            db.session.commit()
    except SQLAlchemyError as ex:
        # Roll back the partial work and surface a DAO-level error,
        # chaining the original exception for debuggability.
        db.session.rollback()
        raise DAODeleteFailedError(str(ex)) from ex

@staticmethod
def validate_unique_creation_method(
dashboard_id: int | None = None, chart_id: int | None = None
Expand Down
1 change: 0 additions & 1 deletion superset/examples/birth_names.py
Original file line number Diff line number Diff line change
Expand Up @@ -537,7 +537,6 @@ def create_dashboard(slices: list[Slice]) -> Dashboard:
dash = db.session.query(Dashboard).filter_by(slug="births").first()
if not dash:
dash = Dashboard()
dash.owners = []
db.session.add(dash)

dash.published = True
Expand Down
57 changes: 57 additions & 0 deletions superset/migrations/shared/constraints.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
from __future__ import annotations

from dataclasses import dataclass

from alembic import op
from sqlalchemy.engine.reflection import Inspector

from superset.utils.core import generic_find_fk_constraint_name


@dataclass(frozen=True)
class ForeignKey:
    """
    Immutable description of a foreign key constraint.

    :param table: Name of the table holding the constraint
    :param referent_table: Name of the table the constraint references
    :param local_cols: Constrained column names on ``table``
    :param remote_cols: Referenced column names on ``referent_table``
    """

    table: str
    referent_table: str
    local_cols: list[str]
    remote_cols: list[str]

    @property
    def constraint_name(self) -> str:
        """Conventional name: ``fk_<table>_<first local column>_<referent table>``."""
        return "_".join(("fk", self.table, self.local_cols[0], self.referent_table))


def redefine(
    foreign_key: ForeignKey,
    on_delete: str | None = None,
    on_update: str | None = None,
) -> None:
    """
    Redefine the foreign key constraint to include the ON DELETE and ON UPDATE
    constructs for cascading purposes.

    The existing constraint (if one can be found by reflection) is dropped and
    then recreated with the conventional name from ``ForeignKey.constraint_name``.

    :param foreign_key: The foreign key constraint
    :param on_delete: If set, emit ON DELETE <value> when issuing DDL operations
    :param on_update: If set, emit ON UPDATE <value> when issuing DDL operations
    """

    bind = op.get_bind()
    insp = Inspector.from_engine(bind)
    # Naming convention used by batch mode so constraints can be addressed
    # consistently across dialects (presumably needed for SQLite's unnamed
    # constraints — NOTE(review): confirm against Alembic batch-mode docs).
    conv = {"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s"}

    with op.batch_alter_table(foreign_key.table, naming_convention=conv) as batch_op:
        # Drop the existing constraint only if reflection can locate it;
        # otherwise proceed straight to creating the new one.
        if constraint := generic_find_fk_constraint_name(
            table=foreign_key.table,
            columns=set(foreign_key.remote_cols),
            referenced=foreign_key.referent_table,
            insp=insp,
        ):
            batch_op.drop_constraint(constraint, type_="foreignkey")

        batch_op.create_foreign_key(
            constraint_name=foreign_key.constraint_name,
            referent_table=foreign_key.referent_table,
            local_cols=foreign_key.local_cols,
            remote_cols=foreign_key.remote_cols,
            ondelete=on_delete,
            onupdate=on_update,
        )
Original file line number Diff line number Diff line change
Expand Up @@ -21,74 +21,46 @@
Create Date: 2023-06-22 13:39:47.989373
"""
from __future__ import annotations

# revision identifiers, used by Alembic.
revision = "6fbe660cac39"
down_revision = "90139bf715e4"

import sqlalchemy as sa
from alembic import op

from superset.utils.core import generic_find_fk_constraint_name


def migrate(ondelete: str | None) -> None:
    """
    Redefine the foreign key constraints, via a successive DROP and ADD, for all tables
    related to the `tables` table to include the ON DELETE construct for cascading
    purposes.

    :param ondelete: If set, emit ON DELETE <value> when issuing DDL for this constraint
    """

    bind = op.get_bind()
    insp = sa.engine.reflection.Inspector.from_engine(bind)

    # Naming convention for batch mode so the recreated constraints get
    # deterministic names across dialects.
    conv = {
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    }

    # Both tables reference tables.id via a table_id column.
    for table in ("sql_metrics", "table_columns"):
        with op.batch_alter_table(table, naming_convention=conv) as batch_op:
            # Only drop the old constraint if reflection can find it.
            if constraint := generic_find_fk_constraint_name(
                table=table,
                columns={"id"},
                referenced="tables",
                insp=insp,
            ):
                batch_op.drop_constraint(constraint, type_="foreignkey")

            batch_op.create_foreign_key(
                constraint_name=f"fk_{table}_table_id_tables",
                referent_table="tables",
                local_cols=["table_id"],
                remote_cols=["id"],
                ondelete=ondelete,
            )

    # The sqlatable_user association table has two foreign keys, handled in
    # one batch operation: user_id -> ab_user.id and table_id -> tables.id.
    with op.batch_alter_table("sqlatable_user", naming_convention=conv) as batch_op:
        for table, column in zip(("ab_user", "tables"), ("user_id", "table_id")):
            if constraint := generic_find_fk_constraint_name(
                table="sqlatable_user",
                columns={"id"},
                referenced=table,
                insp=insp,
            ):
                batch_op.drop_constraint(constraint, type_="foreignkey")

            batch_op.create_foreign_key(
                constraint_name=f"fk_sqlatable_user_{column}_{table}",
                referent_table=table,
                local_cols=[column],
                remote_cols=["id"],
                ondelete=ondelete,
            )
from superset.migrations.shared.constraints import ForeignKey, redefine

# (table, referenced table, local FK column) triples for every foreign key
# this migration redefines; each one references the target table's "id" column.
_FK_SPECS = (
    ("sql_metrics", "tables", "table_id"),
    ("table_columns", "tables", "table_id"),
    ("sqlatable_user", "ab_user", "user_id"),
    ("sqlatable_user", "tables", "table_id"),
)

foreign_keys = [
    ForeignKey(
        table=table,
        referent_table=referent_table,
        local_cols=[local_col],
        remote_cols=["id"],
    )
    for table, referent_table, local_col in _FK_SPECS
]


def upgrade():
    """Recreate each foreign key with an explicit ON DELETE CASCADE."""
    for fk in foreign_keys:
        redefine(fk, on_delete="CASCADE")


def downgrade():
    """Recreate each foreign key without any ON DELETE action."""
    for fk in foreign_keys:
        redefine(fk)
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add on delete cascade for owners references
Revision ID: 6d05b0a70c89
Revises: f92a3124dd66
Create Date: 2023-07-11 15:51:57.964635
"""

# revision identifiers, used by Alembic.
revision = "6d05b0a70c89"
down_revision = "f92a3124dd66"

from superset.migrations.shared.constraints import ForeignKey, redefine

# (table, referenced table, local FK column) triples describing each ownership
# association foreign key; every one references the target table's "id" column.
_FK_SPECS = (
    ("dashboard_user", "ab_user", "user_id"),
    ("dashboard_user", "dashboards", "dashboard_id"),
    ("report_schedule_user", "ab_user", "user_id"),
    ("report_schedule_user", "report_schedule", "report_schedule_id"),
    ("slice_user", "ab_user", "user_id"),
    ("slice_user", "slices", "slice_id"),
)

foreign_keys = [
    ForeignKey(
        table=table,
        referent_table=referent_table,
        local_cols=[local_col],
        remote_cols=["id"],
    )
    for table, referent_table, local_col in _FK_SPECS
]


def upgrade():
    """Recreate each ownership foreign key with an explicit ON DELETE CASCADE."""
    for fk in foreign_keys:
        redefine(fk, on_delete="CASCADE")


def downgrade():
    """Recreate each ownership foreign key without any ON DELETE action."""
    for fk in foreign_keys:
        redefine(fk)
10 changes: 7 additions & 3 deletions superset/models/dashboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,8 +115,8 @@ def copy_dashboard(_mapper: Mapper, connection: Connection, target: Dashboard) -
"dashboard_user",
metadata,
Column("id", Integer, primary_key=True),
Column("user_id", Integer, ForeignKey("ab_user.id")),
Column("dashboard_id", Integer, ForeignKey("dashboards.id")),
Column("user_id", Integer, ForeignKey("ab_user.id", ondelete="CASCADE")),
Column("dashboard_id", Integer, ForeignKey("dashboards.id", ondelete="CASCADE")),
)


Expand Down Expand Up @@ -146,7 +146,11 @@ class Dashboard(Model, AuditMixinNullable, ImportExportMixin):
slices: list[Slice] = relationship(
Slice, secondary=dashboard_slices, backref="dashboards"
)
owners = relationship(security_manager.user_model, secondary=dashboard_user)
owners = relationship(
security_manager.user_model,
secondary=dashboard_user,
passive_deletes=True,
)
tags = relationship(
"Tag",
overlaps="objects,tag,tags,tags",
Expand Down
10 changes: 7 additions & 3 deletions superset/models/slice.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,8 +58,8 @@
"slice_user",
metadata,
Column("id", Integer, primary_key=True),
Column("user_id", Integer, ForeignKey("ab_user.id")),
Column("slice_id", Integer, ForeignKey("slices.id")),
Column("user_id", Integer, ForeignKey("ab_user.id", ondelete="CASCADE")),
Column("slice_id", Integer, ForeignKey("slices.id", ondelete="CASCADE")),
)
logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -95,7 +95,11 @@ class Slice( # pylint: disable=too-many-public-methods
last_saved_by = relationship(
security_manager.user_model, foreign_keys=[last_saved_by_fk]
)
owners = relationship(security_manager.user_model, secondary=slice_user)
owners = relationship(
security_manager.user_model,
secondary=slice_user,
passive_deletes=True,
)
tags = relationship(
"Tag",
secondary="tagged_object",
Expand Down
Loading

0 comments on commit 96cf640

Please sign in to comment.