superset-commits mailing list archives

From maximebeauche...@apache.org
Subject [incubator-superset] branch master updated: Fix uniqueness constraints on tables table (#6718)
Date Tue, 29 Jan 2019 06:49:36 GMT
This is an automated email from the ASF dual-hosted git repository.

maximebeauchemin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new c4fb7a0  Fix uniqueness constraints on tables table (#6718)
c4fb7a0 is described below

commit c4fb7a0a8737421e06cb91a9c768d13ebab3335a
Author: agrawaldevesh <dagrawal@uber.com>
AuthorDate: Mon Jan 28 22:49:31 2019 -0800

    Fix uniqueness constraints on tables table (#6718)
    
    Summary: Superset code enforces (in the Table CRUD view's pre_add) that
    a table is unique within (database, schema, table_name). The model was
    updated to reflect that in commit
    15b67b2c6c3c2982f6620fce5d30bd05951458f7 (in 2016), but the change was
    never ported over to a migration.
    
    This diff fixes that. I chose to add a new migration rather than amend
    an existing one so that existing installations are also fixed cleanly.
    
    I also considered removing the uniqueness constraint, but that does not
    work: first, the (database, schema, table_name) uniqueness is enforced
    elsewhere in the code as well; second, the .sql field is not a
    first-class citizen yet: the table's schema is derived from the table
    name, and the sql part is only used when building the explore query. So
    we do want this uniqueness constraint. (Dropping it also breaks the
    unit tests in dict_import_export_tests.py.)
    [Perhaps it can be removed once there is true .sql support, but for now
    users would have to create a database view and use that as the
    'table name'; that way they also get schema inference.]
    
    Also added INFO logging to the alembic migration.
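
For context, here is a minimal standalone sketch (not part of this commit) of the rule described above, using SQLAlchemy directly. DemoTable is a hypothetical stand-in for the SqlaTable model changed in the diff below: the same table_name may appear under different schemas, but an exact (database_id, schema, table_name) duplicate is rejected at the database level.

# Minimal sketch, assuming SQLAlchemy 1.4+; DemoTable is a hypothetical
# stand-in for superset.connectors.sqla.models.SqlaTable shown in the diff.
import sqlalchemy as sa
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class DemoTable(Base):
    __tablename__ = 'tables'
    __table_args__ = (
        sa.UniqueConstraint('database_id', 'schema', 'table_name',
                            name='uq_table_in_db_schema'),
    )
    id = sa.Column(sa.Integer, primary_key=True)
    database_id = sa.Column(sa.Integer)
    schema = sa.Column(sa.String(255))
    table_name = sa.Column(sa.String(250))

engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(DemoTable(database_id=1, schema='main', table_name='users'))
    session.add(DemoTable(database_id=1, schema='audit', table_name='users'))  # OK: schema differs
    session.commit()
    session.add(DemoTable(database_id=1, schema='main', table_name='users'))  # exact duplicate
    try:
        session.commit()
    except IntegrityError:
        print('duplicate (database_id, schema, table_name) rejected')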
---
 superset/connectors/sqla/models.py                 |  5 +-
 superset/migrations/alembic.ini                    |  2 +-
 ...823bf_make_table_unique_within_db_and_schema.py | 77 ++++++++++++++++++++++
 superset/models/helpers.py                         | 19 ++++--
 4 files changed, 96 insertions(+), 7 deletions(-)

diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index dff4559..8a21f52 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -251,7 +251,10 @@ class SqlaTable(Model, BaseDatasource):
     owner_class = security_manager.user_model
 
     __tablename__ = 'tables'
-    __table_args__ = (UniqueConstraint('database_id', 'table_name'),)
+    __table_args__ = (UniqueConstraint('database_id',
+                                       'schema',
+                                       'table_name',
+                                       name='uq_table_in_db_schema'),)
 
     table_name = Column(String(250))
     main_dttm_col = Column(String(250))
diff --git a/superset/migrations/alembic.ini b/superset/migrations/alembic.ini
index 5bd6d2f..de65bc5 100644
--- a/superset/migrations/alembic.ini
+++ b/superset/migrations/alembic.ini
@@ -42,7 +42,7 @@ handlers = console
 qualname =
 
 [logger_sqlalchemy]
-level = WARN
+level = INFO
 handlers =
 qualname = sqlalchemy.engine
 
diff --git a/superset/migrations/versions/8d49a37823bf_make_table_unique_within_db_and_schema.py b/superset/migrations/versions/8d49a37823bf_make_table_unique_within_db_and_schema.py
new file mode 100644
index 0000000..518ddb3
--- /dev/null
+++ b/superset/migrations/versions/8d49a37823bf_make_table_unique_within_db_and_schema.py
@@ -0,0 +1,77 @@
+"""make_table_unique_within_db_and_schema
+
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Revision ID: 8d49a37823bf
+Revises: 18dc26817ad2
+Create Date: 2019-01-20 11:44:14.640628
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '8d49a37823bf'
+down_revision = '18dc26817ad2'
+
+from alembic import op
+import sqlalchemy as sa
+
+from superset.utils.core import generic_find_uq_constraint_name
+from collections import OrderedDict
+
+def is_unique_constraint(constraint):
+    return constraint and isinstance(constraint, sa.UniqueConstraint)
+
+def is_sqlite():
+    bind = op.get_bind()
+    return bind and bind.dialect and bind.dialect.name and bind.dialect.name.startswith('sqlite')
+
+def upgrade():
+    bind = op.get_bind()
+    insp = sa.engine.reflection.Inspector.from_engine(bind)
+    constraints = insp.get_unique_constraints('tables')
+    table_new_uniq_constraint = ['database_id', 'schema', 'table_name']
+    if not constraints:
+        constraints = []
+    # Sqlite cannot handle constraint change and has to recreate the table
+    if is_sqlite():
+        existing_table = sa.Table(
+            'tables', sa.MetaData(),
+            autoload=True,
+            autoload_with=op.get_bind())
+        existing_table.constraints = set([c for c in existing_table.constraints if not is_unique_constraint(c)])
+        # We don't want to preserve the existing table_args for the tables table
+        with op.batch_alter_table('tables', copy_from=existing_table, recreate="always") as batch_op:
+            batch_op.create_unique_constraint('uq_table_in_db_schema', table_new_uniq_constraint)
+    else:
+        op.create_unique_constraint('uq_table_in_db_schema', 'tables', table_new_uniq_constraint)
+        # and for other databases we need to explicitly remove the earlier constraints
+        # otherwise they don't get removed as with above copy_from approach
+        for c in constraints:
+            name = c.get('name', None)
+            if name:
+                op.drop_constraint(name, 'tables', type_='unique')
+
+def downgrade():
+    table_name_existing_unique = ['database_id', 'table_name']
+    if is_sqlite():
+        with op.batch_alter_table('tables', recreate="always") as batch_op:
+            batch_op.create_unique_constraint(
+                'uq_tables_table_name', 
+                table_name_existing_unique)
+            batch_op.drop_constraint('uq_table_in_db_schema', type_='unique')
+    else:
+        op.create_unique_constraint('uq_tables_table_name', 'tables', table_name_existing_unique)
+        op.drop_constraint('uq_table_in_db_schema', 'tables', type_='unique')
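
As a quick sanity check (not part of this commit), the new constraint can be reflected with SQLAlchemy's inspector once the migration above has run; the database URL below is a placeholder for an actual Superset metadata database.

# Sketch only: point the URL at a real Superset metadata database.
import sqlalchemy as sa

engine = sa.create_engine('sqlite:///superset.db')  # placeholder URL
inspector = sa.inspect(engine)
for uc in inspector.get_unique_constraints('tables'):
    print(uc['name'], uc['column_names'])
# Expected to include: uq_table_in_db_schema ['database_id', 'schema', 'table_name']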
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index a1e9b3c..d08fe01 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -97,10 +97,13 @@ class ImportMixin(object):
 
     @classmethod
     def import_from_dict(cls, session, dict_rep, parent=None,
-                         recursive=True, sync=[]):
+                         recursive=True, sync=[], respect_id=True):
         """Import obj from a dictionary"""
         parent_refs = cls._parent_foreign_key_mappings()
         export_fields = set(cls.export_fields) | set(parent_refs.keys())
+        logging.info(f'Doing the import_from_dict for the {cls}, with {dict_rep}, '
+                     f'respect_id={respect_id}')
+        given_id = dict_rep.get('id', None) if respect_id else None
         new_children = {c: dict_rep.get(c) for c in cls.export_children
                         if c in dict_rep}
         unique_constrains = cls._unique_constrains()
@@ -128,14 +131,20 @@ class ImportMixin(object):
                         for k in parent_refs.keys()])
 
         # Add filter for unique constraints
-        ucs = [and_(*[getattr(cls, k) == dict_rep.get(k)
-               for k in cs if dict_rep.get(k) is not None])
-               for cs in unique_constrains]
-        filters.append(or_(*ucs))
+        if unique_constrains:
+            ucs = [and_(*[getattr(cls, k) == dict_rep.get(k)
+                   for k in cs if dict_rep.get(k) is not None])
+                   for cs in unique_constrains]
+            filters.append(or_(*ucs))
+        elif given_id:
+            logging.info(f'Not given any unique constraint, so adding an id check for '
+                         f'{getattr(cls, "id")} equal to {given_id}')
+            filters.append(getattr(cls, 'id') == given_id)
 
         # Check if object already exists in DB, break if more than one is found
         try:
             obj_query = session.query(cls).filter(and_(*filters))
+            logging.info(f'Did the query {str(obj_query)} to find existing for {cls}')
             obj = obj_query.one_or_none()
         except MultipleResultsFound as e:
             logging.error('Error importing %s \n %s \n %s', cls.__name__,

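Finally, a self-contained sketch (not Superset code; DemoModel and build_existing_object_filters are illustrative names) of the lookup-filter fallback added in the helpers.py hunk above: match on unique-constraint columns when the model declares any, otherwise fall back to the incoming id when respect_id is set.

import sqlalchemy as sa
from sqlalchemy import and_, or_
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class DemoModel(Base):  # hypothetical stand-in for an ImportMixin model
    __tablename__ = 'demo'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String(100))

def build_existing_object_filters(cls, dict_rep, unique_constrains, respect_id=True):
    """Mirrors the filter construction in ImportMixin.import_from_dict above."""
    filters = []
    given_id = dict_rep.get('id') if respect_id else None
    if unique_constrains:
        # One AND clause per unique constraint, OR-ed together.
        ucs = [and_(*[getattr(cls, k) == dict_rep.get(k)
                      for k in cs if dict_rep.get(k) is not None])
               for cs in unique_constrains]
        filters.append(or_(*ucs))
    elif given_id:
        # No unique constraints declared on the model: match on the exported id.
        filters.append(cls.id == given_id)
    return filters

# With no unique constraints, the id fallback kicks in:
filters = build_existing_object_filters(DemoModel, {'id': 7, 'name': 'x'}, unique_constrains=[])
print(str(filters[0]))  # demo.id = :id_1

In import_from_dict itself these filters are combined with the parent foreign-key filters and passed to session.query(cls).filter(and_(*filters)).one_or_none(), as shown in the hunk above.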
