superset-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From johnbod...@apache.org
Subject [incubator-superset] branch master updated: Revert "Removing uniqueness constraints on tables table" (#6777)
Date Thu, 31 Jan 2019 17:57:58 GMT
This is an automated email from the ASF dual-hosted git repository.

johnbodley pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 2631558  Revert "Removing uniqueness constraints on tables table" (#6777)
2631558 is described below

commit 2631558ac4ba596b24876a9b0702bbf3a6bfbb38
Author: John Bodley <4567245+john-bodley@users.noreply.github.com>
AuthorDate: Thu Jan 31 09:57:51 2019 -0800

    Revert "Removing uniqueness constraints on tables table" (#6777)
    
    * Revert "creating new circular-json safe stringify and replacing one call (#6772)"
    
    This reverts commit 11a7ad00b7c44785752bf23ef90bcf4ebb58598e.
    
    * Revert "Improve Unicode support for MSSQL (#6690)"
    
    This reverts commit c44ae612dfc797e2002134cc3fc78eb7057beeed.
    
    * Revert "Fix uniqueness constraints on tables table (#6718)"
    
    This reverts commit c4fb7a0a8737421e06cb91a9c768d13ebab3335a.
---
 superset/connectors/sqla/models.py                 |  5 +-
 superset/migrations/alembic.ini                    |  2 +-
 ...823bf_make_table_unique_within_db_and_schema.py | 77 ----------------------
 superset/models/helpers.py                         | 19 ++----
 4 files changed, 7 insertions(+), 96 deletions(-)

diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index fa53e31..cfbea85 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -253,10 +253,7 @@ class SqlaTable(Model, BaseDatasource):
     owner_class = security_manager.user_model
 
     __tablename__ = 'tables'
-    __table_args__ = (UniqueConstraint('database_id',
-                                       'schema',
-                                       'table_name',
-                                       name='uq_table_in_db_schema'),)
+    __table_args__ = (UniqueConstraint('database_id', 'table_name'),)
 
     table_name = Column(String(250))
     main_dttm_col = Column(String(250))
diff --git a/superset/migrations/alembic.ini b/superset/migrations/alembic.ini
index de65bc5..5bd6d2f 100644
--- a/superset/migrations/alembic.ini
+++ b/superset/migrations/alembic.ini
@@ -42,7 +42,7 @@ handlers = console
 qualname =
 
 [logger_sqlalchemy]
-level = INFO
+level = WARN
 handlers =
 qualname = sqlalchemy.engine
 
diff --git a/superset/migrations/versions/8d49a37823bf_make_table_unique_within_db_and_schema.py b/superset/migrations/versions/8d49a37823bf_make_table_unique_within_db_and_schema.py
deleted file mode 100644
index 518ddb3..0000000
--- a/superset/migrations/versions/8d49a37823bf_make_table_unique_within_db_and_schema.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""make_table_unique_within_db_and_schema
-
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Revision ID: 8d49a37823bf
-Revises: 18dc26817ad2
-Create Date: 2019-01-20 11:44:14.640628
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '8d49a37823bf'
-down_revision = '18dc26817ad2'
-
-from alembic import op
-import sqlalchemy as sa
-
-from superset.utils.core import generic_find_uq_constraint_name
-from collections import OrderedDict
-
-def is_unique_constraint(constraint):
-    return constraint and isinstance(constraint, sa.UniqueConstraint)
-
-def is_sqlite():
-    bind = op.get_bind()
-    return bind and bind.dialect and bind.dialect.name and bind.dialect.name.startswith('sqlite')
-
-def upgrade():
-    bind = op.get_bind()
-    insp = sa.engine.reflection.Inspector.from_engine(bind)
-    constraints = insp.get_unique_constraints('tables')
-    table_new_uniq_constraint = ['database_id', 'schema', 'table_name']
-    if not constraints:
-        constraints = []
-    # Sqlite cannot handle constraint change and has to recreate the table
-    if is_sqlite():
-        existing_table = sa.Table(
-            'tables', sa.MetaData(),
-            autoload=True,
-            autoload_with=op.get_bind())
-        existing_table.constraints = set([c for c in existing_table.constraints if not is_unique_constraint(c)])
-        # We don't want to preserve the existing table_args for the tables table
-        with op.batch_alter_table('tables', copy_from=existing_table, recreate="always") as batch_op:
-            batch_op.create_unique_constraint('uq_table_in_db_schema', table_new_uniq_constraint)
-    else:
-        op.create_unique_constraint('uq_table_in_db_schema', 'tables', table_new_uniq_constraint)
-        # and for other databases we need to explicitly remove the earlier constraints
-        # otherwise they don't get removed as with above copy_from approach
-        for c in constraints:
-            name = c.get('name', None)
-            if name:
-                op.drop_constraint(name, 'tables', type_='unique')
-
-def downgrade():
-    table_name_existing_unique = ['database_id', 'table_name']
-    if is_sqlite():
-        with op.batch_alter_table('tables', recreate="always") as batch_op:
-            batch_op.create_unique_constraint(
-                'uq_tables_table_name', 
-                table_name_existing_unique)
-            batch_op.drop_constraint('uq_table_in_db_schema', type_='unique')
-    else:
-        op.create_unique_constraint('uq_tables_table_name', 'tables', table_name_existing_unique)
-        op.drop_constraint('uq_table_in_db_schema', 'tables', type_='unique')
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index d08fe01..a1e9b3c 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -97,13 +97,10 @@ class ImportMixin(object):
 
     @classmethod
     def import_from_dict(cls, session, dict_rep, parent=None,
-                         recursive=True, sync=[], respect_id=True):
+                         recursive=True, sync=[]):
         """Import obj from a dictionary"""
         parent_refs = cls._parent_foreign_key_mappings()
         export_fields = set(cls.export_fields) | set(parent_refs.keys())
-        logging.info(f'Doing the import_from_dict for the {cls}, with {dict_rep}, '
-                     f'respect_id={respect_id}')
-        given_id = dict_rep.get('id', None) if respect_id else None
         new_children = {c: dict_rep.get(c) for c in cls.export_children
                         if c in dict_rep}
         unique_constrains = cls._unique_constrains()
@@ -131,20 +128,14 @@ class ImportMixin(object):
                         for k in parent_refs.keys()])
 
         # Add filter for unique constraints
-        if unique_constrains:
-            ucs = [and_(*[getattr(cls, k) == dict_rep.get(k)
-                   for k in cs if dict_rep.get(k) is not None])
-                   for cs in unique_constrains]
-            filters.append(or_(*ucs))
-        elif given_id:
-            logging.info(f'Not given any unique constraint, so adding an id check for'
-                         f'{getattr(cls, "id")} equal to {given_id}')
-            filters.append(getattr(cls, 'id') == given_id)
+        ucs = [and_(*[getattr(cls, k) == dict_rep.get(k)
+               for k in cs if dict_rep.get(k) is not None])
+               for cs in unique_constrains]
+        filters.append(or_(*ucs))
 
         # Check if object already exists in DB, break if more than one is found
         try:
             obj_query = session.query(cls).filter(and_(*filters))
-            logging.info(f'Did the query {str(obj_query)} to find existing for {cls}')
             obj = obj_query.one_or_none()
         except MultipleResultsFound as e:
             logging.error('Error importing %s \n %s \n %s', cls.__name__,


Mime
View raw message