You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@superset.apache.org by gr...@apache.org on 2019/07/22 23:48:52 UTC
[incubator-superset] branch master updated: [migration] add unique
constraint on dashboard_slices table (#7880)
This is an automated email from the ASF dual-hosted git repository.
graceguo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git
The following commit(s) were added to refs/heads/master by this push:
new 9dd6a38 [migration] add unique constraint on dashboard_slices table (#7880)
9dd6a38 is described below
commit 9dd6a385e672760d92f049d6e04e610b3a9ec102
Author: Grace Guo <gr...@airbnb.com>
AuthorDate: Mon Jul 22 16:48:41 2019 -0700
[migration] add unique constraint on dashboard_slices table (#7880)
1. remove duplicated entries in many-to-many relation tbl dashboard_slices
2. add unique constraint on tbl
3. update the model to include the uniqueness constraint
---
...ng_unique_constraint_on_dashboard_slices_tbl.py | 100 +++++++++++++++++++++
superset/models/core.py | 1 +
2 files changed, 101 insertions(+)
diff --git a/superset/migrations/versions/190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py b/superset/migrations/versions/190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py
new file mode 100644
index 0000000..a91b3da
--- /dev/null
+++ b/superset/migrations/versions/190188938582_adding_unique_constraint_on_dashboard_slices_tbl.py
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Remove duplicated entries in dashboard_slices table and add unique constraint
+
+Revision ID: 190188938582
+Revises: d6ffdf31bdd4
+Create Date: 2019-07-15 12:00:32.267507
+
+"""
+import logging
+
+from alembic import op
+from sqlalchemy import and_, Column, ForeignKey, Integer, Table
+from sqlalchemy.ext.declarative import declarative_base
+
+from superset import db
+
# Revision identifiers, used by Alembic to order migrations.
revision = "190188938582"
down_revision = "d6ffdf31bdd4"

# Local declarative base: the migration defines its own lightweight model
# below rather than importing application models, which may drift over time.
Base = declarative_base()
+
+
class DashboardSlices(Base):
    """Minimal mapping of the ``dashboard_slices`` association table.

    Only the columns this migration touches are declared: the surrogate
    primary key and the (dashboard_id, slice_id) pair being deduplicated.
    """

    __tablename__ = "dashboard_slices"
    id = Column(Integer, primary_key=True)
    dashboard_id = Column(Integer, ForeignKey("dashboards.id"))
    slice_id = Column(Integer, ForeignKey("slices.id"))
+
+
def upgrade():
    """Deduplicate ``dashboard_slices`` rows, then enforce uniqueness.

    1. Find (dashboard_id, slice_id) pairs that have more than one row.
    2. Delete all but the first row for each duplicated pair.
    3. Add a unique constraint over (dashboard_id, slice_id).
    """
    bind = op.get_bind()
    session = db.Session(bind=bind)

    # Find (dashboard_id, slice_id) pairs that appear more than once.
    dup_records = (
        session.query(
            DashboardSlices.dashboard_id,
            DashboardSlices.slice_id,
            db.func.count(DashboardSlices.id),
        )
        .group_by(DashboardSlices.dashboard_id, DashboardSlices.slice_id)
        .having(db.func.count(DashboardSlices.id) > 1)
        .all()
    )

    # Remove all but the first row for each duplicated pair.
    for record in dup_records:
        # Use lazy %s logging (not print) for consistency with the rest of
        # the module, which already imports logging.
        logging.info(
            "remove duplicates from dashboard %s slice %s",
            record.dashboard_id,
            record.slice_id,
        )

        ids = [
            item.id
            for item in session.query(DashboardSlices.id)
            .filter(
                and_(
                    DashboardSlices.slice_id == record.slice_id,
                    DashboardSlices.dashboard_id == record.dashboard_id,
                )
            )
            .offset(1)  # keep the first row, drop the rest
        ]
        session.query(DashboardSlices).filter(DashboardSlices.id.in_(ids)).delete(
            synchronize_session=False
        )

    # Persist the deletions and release the session before altering the
    # schema; otherwise the unique constraint below could fail on
    # duplicates still pending in the transaction.
    session.commit()
    session.close()

    # Add the unique constraint. batch_alter_table is used because SQLite
    # cannot add a constraint with a plain ALTER TABLE. Failures (e.g. the
    # constraint already existing) are logged rather than aborting the
    # migration, matching downgrade()'s best-effort behavior.
    try:
        with op.batch_alter_table("dashboard_slices") as batch_op:
            batch_op.create_unique_constraint(
                "uq_dashboard_slice", ["dashboard_id", "slice_id"]
            )
    except Exception as e:
        logging.exception(e)
+
+
def downgrade():
    """Drop the (dashboard_id, slice_id) unique constraint again.

    Best-effort: any failure while dropping the constraint is logged
    instead of raised, so a downgrade run is never aborted here.
    """
    try:
        with op.batch_alter_table("dashboard_slices") as table_batch:
            table_batch.drop_constraint("uq_dashboard_slice", type_="unique")
    except Exception as err:
        logging.exception(err)
diff --git a/superset/models/core.py b/superset/models/core.py
index 37a7592..59d38eb 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -395,6 +395,7 @@ dashboard_slices = Table(
Column("id", Integer, primary_key=True),
Column("dashboard_id", Integer, ForeignKey("dashboards.id")),
Column("slice_id", Integer, ForeignKey("slices.id")),
+ UniqueConstraint("dashboard_id", "slice_id"),
)
dashboard_user = Table(