superset-commits mailing list archives

From grace...@apache.org
Subject [incubator-superset] branch master updated: Revert #5991 (#6035)
Date Thu, 04 Oct 2018 20:43:16 GMT
This is an automated email from the ASF dual-hosted git repository.

graceguo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new a9ef0ae  Revert #5991 (#6035)
a9ef0ae is described below

commit a9ef0aeaf58fdd42265339f3d26a49ad01b03033
Author: Grace Guo <grace.guo@airbnb.com>
AuthorDate: Thu Oct 4 13:43:01 2018 -0700

    Revert #5991 (#6035)
---
 superset/cli.py                          | 52 ++------------------------------
 superset/dashboard_import_export_util.py | 39 ------------------------
 superset/views/core.py                   | 16 ++++++++--
 tests/import_export_tests.py             | 33 +-------------------
 4 files changed, 16 insertions(+), 124 deletions(-)
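
In short, this push reverts #5991: the import_dashboards and export_dashboards CLI commands are removed from superset/cli.py, the superset/dashboard_import_export_util.py module is deleted, the inline dashboard-import logic is restored in superset/views/core.py, and the corresponding tests are dropped from tests/import_export_tests.py.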

diff --git a/superset/cli.py b/superset/cli.py
index 4a34bdc..6183601 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -18,8 +18,7 @@ import werkzeug.serving
 import yaml
 
 from superset import (
-    app, dashboard_import_export_util, data, db,
-    dict_import_export_util, security_manager, utils,
+    app, data, db, dict_import_export_util, security_manager, utils,
 )
 
 config = app.config
@@ -228,53 +227,6 @@ def refresh_druid(datasource, merge):
 @app.cli.command()
 @click.option(
     '--path', '-p',
-    help='Path to a single JSON file or path containing multiple JSON files'
-         'files to import (*.json)')
-@click.option(
-    '--recursive', '-r',
-    help='recursively search the path for json files')
-def import_dashboards(path, recursive=False):
-    """Import dashboards from JSON"""
-    p = Path(path)
-    files = []
-    if p.is_file():
-        files.append(p)
-    elif p.exists() and not recursive:
-        files.extend(p.glob('*.json'))
-    elif p.exists() and recursive:
-        files.extend(p.rglob('*.json'))
-    for f in files:
-        logging.info('Importing dashboard from file %s', f)
-        try:
-            with f.open() as data_stream:
-                dashboard_import_export_util.import_dashboards(
-                    db.session, data_stream)
-        except Exception as e:
-            logging.error('Error when importing dashboard from file %s', f)
-            logging.error(e)
-
-
-@app.cli.command()
-@click.option(
-    '--dashboard-file', '-f', default=None,
-    help='Specify the the file to export to')
-@click.option(
-    '--print_stdout', '-p',
-    help='Print JSON to stdout')
-def export_dashboards(print_stdout, dashboard_file):
-    """Export dashboards to JSON"""
-    data = dashboard_import_export_util.export_dashboards(db.session)
-    if print_stdout or not dashboard_file:
-        print(data)
-    if dashboard_file:
-        logging.info('Exporting dashboards to %s', dashboard_file)
-        with open(dashboard_file, 'w') as data_stream:
-            data_stream.write(data)
-
-
-@app.cli.command()
-@click.option(
-    '--path', '-p',
     help='Path to a single YAML file or path containing multiple YAML '
          'files to import (*.yaml or *.yml)')
 @click.option(
@@ -316,7 +268,7 @@ def import_datasources(path, sync, recursive=False):
     '--datasource-file', '-f', default=None,
     help='Specify the the file to export to')
 @click.option(
-    '--print_stdout', '-p',
+    '--print', '-p',
     help='Print YAML to stdout')
 @click.option(
     '--back-references', '-b',
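
(Besides dropping the two dashboard commands, this hunk restores the datasource-export flag --print in place of --print_stdout; the short option -p is unchanged.)
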
diff --git a/superset/dashboard_import_export_util.py b/superset/dashboard_import_export_util.py
deleted file mode 100644
index 0a8fd25..0000000
--- a/superset/dashboard_import_export_util.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-# pylint: disable=C,R,W
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
-
-import json
-import logging
-import time
-
-from superset import utils
-from superset.models.core import Dashboard
-
-
-def import_dashboards(session, data_stream, import_time=None):
-    """Imports dashboards from a stream to databases"""
-    current_tt = int(time.time())
-    import_time = current_tt if import_time is None else import_time
-    data = json.loads(data_stream.read(), object_hook=utils.decode_dashboards)
-    # TODO: import DRUID datasources
-    for table in data['datasources']:
-        type(table).import_obj(table, import_time=import_time)
-    session.commit()
-    for dashboard in data['dashboards']:
-        Dashboard.import_obj(
-            dashboard, import_time=import_time)
-    session.commit()
-
-
-def export_dashboards(session):
-    """Returns all dashboards metadata as a json dump"""
-    logging.info('Starting export')
-    dashboards = session.query(Dashboard)
-    dashboard_ids = []
-    for dashboard in dashboards:
-        dashboard_ids.append(dashboard.id)
-    data = Dashboard.export_dashboards(dashboard_ids)
-    return data
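
For reference, the two deleted helpers above could be driven directly from a script; a minimal sketch of their pre-revert usage, assuming a configured Superset environment and a local dashboards.json export:

    from superset import dashboard_import_export_util, db

    # import: replay an exported JSON stream into the current database
    with open('dashboards.json') as data_stream:
        dashboard_import_export_util.import_dashboards(db.session, data_stream)

    # export: dump all dashboard metadata as a single JSON string
    print(dashboard_import_export_util.export_dashboards(db.session))
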
diff --git a/superset/views/core.py b/superset/views/core.py
index 6d37570..f811fd8 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -34,8 +34,9 @@ from werkzeug.routing import BaseConverter
 from werkzeug.utils import secure_filename
 
 from superset import (
-    app, appbuilder, cache, dashboard_import_export_util, db, results_backend,
-    security_manager, sql_lab, utils, viz)
+    app, appbuilder, cache, db, results_backend, security_manager, sql_lab, utils,
+    viz,
+)
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.connectors.sqla.models import AnnotationDatasource, SqlaTable
 from superset.exceptions import SupersetException
@@ -1237,7 +1238,16 @@ class Superset(BaseSupersetView):
         """Overrides the dashboards using json instances from the file."""
         f = request.files.get('file')
         if request.method == 'POST' and f:
-            dashboard_import_export_util.import_dashboards(db.session, f.stream)
+            current_tt = int(time.time())
+            data = json.loads(f.stream.read(), object_hook=utils.decode_dashboards)
+            # TODO: import DRUID datasources
+            for table in data['datasources']:
+                type(table).import_obj(table, import_time=current_tt)
+            db.session.commit()
+            for dashboard in data['dashboards']:
+                models.Dashboard.import_obj(
+                    dashboard, import_time=current_tt)
+            db.session.commit()
             return redirect('/dashboard/list/')
         return self.render_template('superset/import_dashboards.html')
 
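With the util module gone, JSON dashboard import is reachable only through the web view above. A hedged client-side sketch (the base URL is illustrative, a real deployment needs an authenticated session, and the route path is inferred from the view):

    import requests

    # hypothetical local instance; attach auth/session cookies as required
    url = 'http://localhost:8088/superset/import_dashboards'
    with open('dashboards.json', 'rb') as f:
        resp = requests.post(url, files={'file': f})
    print(resp.status_code)
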
diff --git a/tests/import_export_tests.py b/tests/import_export_tests.py
index 932f9e0..3a3d5f9 100644
--- a/tests/import_export_tests.py
+++ b/tests/import_export_tests.py
@@ -10,7 +10,7 @@ import unittest
 
 from sqlalchemy.orm.session import make_transient
 
-from superset import dashboard_import_export_util, db, utils
+from superset import db, utils
 from superset.connectors.druid.models import (
     DruidColumn, DruidDatasource, DruidMetric,
 )
@@ -149,9 +149,6 @@ class ImportExportTests(SupersetTestCase):
         return db.session.query(SqlaTable).filter_by(
             table_name=name).first()
 
-    def get_num_dashboards(self):
-        return db.session.query(models.Dashboard).count()
-
     def assert_dash_equals(self, expected_dash, actual_dash,
                            check_position=True):
         self.assertEquals(expected_dash.slug, actual_dash.slug)
@@ -550,34 +547,6 @@ class ImportExportTests(SupersetTestCase):
         self.assert_datasource_equals(
             copy_datasource, self.get_datasource(imported_id))
 
-    def test_export_dashboards_util(self):
-        dashboards_json_dump = dashboard_import_export_util.export_dashboards(
-            db.session)
-        dashboards_objects = json.loads(
-            dashboards_json_dump,
-            object_hook=utils.decode_dashboards,
-        )
-
-        exported_dashboards = dashboards_objects['dashboards']
-        for dashboard in exported_dashboards:
-            id_ = dashboard.id
-            dash = self.get_dash(id_)
-            self.assert_dash_equals(dash, dashboard)
-            self.assertEquals(
-                dash.id, json.loads(
-                    dashboard.json_metadata,
-                    object_hook=utils.decode_dashboards,
-                )['remote_id'],
-            )
-        numDasboards = self.get_num_dashboards()
-        self.assertEquals(numDasboards, len(exported_dashboards))
-
-        exported_tables = dashboards_objects['datasources']
-        for exported_table in exported_tables:
-            id_ = exported_table.id
-            table = self.get_table(id_)
-            self.assert_table_equals(table, exported_table)
-
 
 if __name__ == '__main__':
     unittest.main()

