Date: Wed, 24 Jan 2018 04:55:29 +0000
From: maximebeauchemin@apache.org
To: "commits@superset.apache.org"
Subject: [incubator-superset] branch master updated: Use json for imports and exports, not pickle (#4243)

This is an automated email from the ASF dual-hosted git repository.

maximebeauchemin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 2c72a7a  Use json for imports and exports, not pickle (#4243)
2c72a7a is described below

commit 2c72a7ae4fc0a8bac1f037a79efa90e1c5549710
Author: timifasubaa <30888507+timifasubaa@users.noreply.github.com>
AuthorDate: Tue Jan 23 20:55:27 2018 -0800

    Use json for imports and exports, not pickle (#4243)

    * make superset imports and exports use json, not pickle

    * fix tests
---
 superset/models/core.py      |  7 +++----
 superset/utils.py            | 50 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 superset/views/core.py       | 10 ++++------
 tests/import_export_tests.py | 33 ++++++++++++++++++++++++++++---------
 4 files changed, 81 insertions(+), 19 deletions(-)

diff --git a/superset/models/core.py b/superset/models/core.py
index 9f26a27..1b71d42 100644
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -9,7 +9,6 @@ from datetime import date, datetime
 import functools
 import json
 import logging
-import pickle
 import textwrap
 
 from flask import escape, g, Markup, request
@@ -395,7 +394,7 @@ class Dashboard(Model, AuditMixinNullable, ImportMixin):
         be overridden or just copies over. Slices that belong to this
         dashboard will be wired to existing tables. This function can be used
         to import/export dashboards between multiple superset instances.
-        Audit metadata isn't copies over.
+        Audit metadata isn't copied over.
         """
         def alter_positions(dashboard, old_to_new_slc_id_dict):
             """ Updates slice_ids in the position json.
@@ -533,10 +532,10 @@ class Dashboard(Model, AuditMixinNullable, ImportMixin):
                 make_transient(eager_datasource)
                 eager_datasources.append(eager_datasource)
 
-        return pickle.dumps({
+        return json.dumps({
             'dashboards': copied_dashboards,
             'datasources': eager_datasources,
-        })
+        }, cls=utils.DashboardEncoder, indent=4)
 
 
 class Database(Model, AuditMixinNullable, ImportMixin):
diff --git a/superset/utils.py b/superset/utils.py
index 8224843..e28eda3 100644
--- a/superset/utils.py
+++ b/superset/utils.py
@@ -42,6 +42,7 @@ import sqlalchemy as sa
 from sqlalchemy import event, exc, select
 from sqlalchemy.types import TEXT, TypeDecorator
 
+
 logging.getLogger('MARKDOWN').setLevel(logging.INFO)
 
 PY3K = sys.version_info >= (3, 0)
@@ -240,6 +241,55 @@ def dttm_from_timtuple(d):
         d.tm_year, d.tm_mon, d.tm_mday, d.tm_hour, d.tm_min, d.tm_sec)
 
 
+def decode_dashboards(o):
+    """
+    Function to be passed into json.loads obj_hook parameter
+    Recreates the dashboard object from a json representation.
+    """
+    import superset.models.core as models
+    from superset.connectors.sqla.models import (
+        SqlaTable, SqlMetric, TableColumn,
+    )
+
+    if '__Dashboard__' in o:
+        d = models.Dashboard()
+        d.__dict__.update(o['__Dashboard__'])
+        return d
+    elif '__Slice__' in o:
+        d = models.Slice()
+        d.__dict__.update(o['__Slice__'])
+        return d
+    elif '__TableColumn__' in o:
+        d = TableColumn()
+        d.__dict__.update(o['__TableColumn__'])
+        return d
+    elif '__SqlaTable__' in o:
+        d = SqlaTable()
+        d.__dict__.update(o['__SqlaTable__'])
+        return d
+    elif '__SqlMetric__' in o:
+        d = SqlMetric()
+        d.__dict__.update(o['__SqlMetric__'])
+        return d
+    elif '__datetime__' in o:
+        return datetime.strptime(o['__datetime__'], '%Y-%m-%dT%H:%M:%S')
+    else:
+        return o
+
+
+class DashboardEncoder(json.JSONEncoder):
+    # pylint: disable=E0202
+    def default(self, o):
+        try:
+            vals = {
+                k: v for k, v in o.__dict__.items() if k != '_sa_instance_state'}
+            return {'__{}__'.format(o.__class__.__name__): vals}
+        except Exception:
+            if type(o) == datetime:
+                return {'__datetime__': o.replace(microsecond=0).isoformat()}
+            return json.JSONEncoder.default(self, o)
+
+
 def parse_human_timedelta(s):
     """
     Returns ``datetime.datetime`` from natural language time deltas
diff --git a/superset/views/core.py b/superset/views/core.py
index ec4cce1..b06492c 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -8,7 +8,6 @@ from datetime import datetime, timedelta
 import json
 import logging
 import os
-import pickle
 import re
 import time
 import traceback
@@ -601,7 +600,7 @@ class DashboardModelView(SupersetModelView, DeleteMixin):  # noqa
             ids = request.args.getlist('id')
             return Response(
                 models.Dashboard.export_dashboards(ids),
-                headers=generate_download_headers('pickle'),
+                headers=generate_download_headers('json'),
                 mimetype='application/text')
         return self.render_template(
             'superset/export_dashboards.html',
@@ -1114,15 +1113,14 @@ class Superset(BaseSupersetView):
     @has_access
     @expose('/import_dashboards', methods=['GET', 'POST'])
     def import_dashboards(self):
-        """Overrides the dashboards using pickled instances from the file."""
+        """Overrides the dashboards using json instances from the file."""
         f = request.files.get('file')
         if request.method == 'POST' and f:
             current_tt = int(time.time())
-            data = pickle.load(f)
+            data = json.loads(f.stream.read(), object_hook=utils.decode_dashboards)
             # TODO: import DRUID datasources
             for table in data['datasources']:
-                ds_class = ConnectorRegistry.sources.get(table.type)
-                ds_class.import_obj(table, import_time=current_tt)
+                type(table).import_obj(table, import_time=current_tt)
             db.session.commit()
             for dashboard in data['dashboards']:
                 models.Dashboard.import_obj(
diff --git a/tests/import_export_tests.py b/tests/import_export_tests.py
index d51b959..245d419 100644
--- a/tests/import_export_tests.py
+++ b/tests/import_export_tests.py
@@ -5,12 +5,11 @@ from __future__ import print_function
 from __future__ import unicode_literals
 
 import json
-import pickle
 import unittest
 
 from sqlalchemy.orm.session import make_transient
 
-from superset import db
+from superset import db, utils
 from superset.connectors.druid.models import (
     DruidColumn, DruidDatasource, DruidMetric,
 )
@@ -205,13 +204,22 @@ class ImportExportTests(SupersetTestCase):
             .format(birth_dash.id)
         )
         resp = self.client.get(export_dash_url)
-        exported_dashboards = pickle.loads(resp.data)['dashboards']
+        exported_dashboards = json.loads(
+            resp.data.decode('utf-8'),
+            object_hook=utils.decode_dashboards,
+        )['dashboards']
         self.assert_dash_equals(birth_dash, exported_dashboards[0])
         self.assertEquals(
             birth_dash.id,
-            json.loads(exported_dashboards[0].json_metadata)['remote_id'])
-
-        exported_tables = pickle.loads(resp.data)['datasources']
+            json.loads(
+                exported_dashboards[0].json_metadata,
+                object_hook=utils.decode_dashboards,
+            )['remote_id'])
+
+        exported_tables = json.loads(
+            resp.data.decode('utf-8'),
+            object_hook=utils.decode_dashboards,
+        )['datasources']
         self.assertEquals(1, len(exported_tables))
         self.assert_table_equals(
             self.get_table_by_name('birth_names'), exported_tables[0])
@@ -223,8 +231,12 @@ class ImportExportTests(SupersetTestCase):
             '/dashboardmodelview/export_dashboards_form?id={}&id={}&action=go'
             .format(birth_dash.id, world_health_dash.id))
         resp = self.client.get(export_dash_url)
-        exported_dashboards = sorted(pickle.loads(resp.data)['dashboards'],
-                                     key=lambda d: d.dashboard_title)
+        exported_dashboards = sorted(
+            json.loads(
+                resp.data.decode('utf-8'),
+                object_hook=utils.decode_dashboards,
+            )['dashboards'],
+            key=lambda d: d.dashboard_title)
         self.assertEquals(2, len(exported_dashboards))
         self.assert_dash_equals(birth_dash, exported_dashboards[0])
         self.assertEquals(
@@ -239,7 +251,10 @@ class ImportExportTests(SupersetTestCase):
         )
 
         exported_tables = sorted(
-            pickle.loads(resp.data)['datasources'], key=lambda t: t.table_name)
+            json.loads(
+                resp.data.decode('utf-8'),
+                object_hook=utils.decode_dashboards)['datasources'],
+            key=lambda t: t.table_name)
         self.assertEquals(2, len(exported_tables))
         self.assert_table_equals(
            self.get_table_by_name('birth_names'), exported_tables[0])
--
To stop receiving notification emails like this one, please contact
maximebeauchemin@apache.org.
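
For reference, below is a minimal, self-contained sketch of the tag-and-rebuild round trip this patch relies on: json.dumps with a custom JSONEncoder that wraps each object's attribute dict under a '__ClassName__' key, and json.loads with an object_hook that rebuilds instances from those tags. The Dashboard class, TaggedEncoder, and decode_tagged names here are illustrative stand-ins, not Superset's models or its utils.DashboardEncoder / utils.decode_dashboards.

import json
from datetime import datetime


class Dashboard(object):
    """Toy stand-in for a model whose state lives in a plain attribute dict."""
    def __init__(self, dashboard_title=None, changed_on=None):
        self.dashboard_title = dashboard_title
        self.changed_on = changed_on


class TaggedEncoder(json.JSONEncoder):
    """Serialize objects as {'__ClassName__': attrs}; datetimes get their own tag."""
    def default(self, o):
        if isinstance(o, datetime):
            return {'__datetime__': o.replace(microsecond=0).isoformat()}
        try:
            return {'__{}__'.format(o.__class__.__name__): dict(o.__dict__)}
        except AttributeError:
            return json.JSONEncoder.default(self, o)


def decode_tagged(o):
    """object_hook for json.loads: rebuild instances from their tagged dicts."""
    if '__Dashboard__' in o:
        d = Dashboard()
        d.__dict__.update(o['__Dashboard__'])
        return d
    if '__datetime__' in o:
        return datetime.strptime(o['__datetime__'], '%Y-%m-%dT%H:%M:%S')
    return o


if __name__ == '__main__':
    original = Dashboard('Births', datetime(2018, 1, 23, 20, 55, 27))
    payload = json.dumps({'dashboards': [original]}, cls=TaggedEncoder, indent=4)
    restored = json.loads(payload, object_hook=decode_tagged)['dashboards'][0]
    print(restored.dashboard_title, restored.changed_on)  # Births 2018-01-23 20:55:27

Unlike pickle.loads, this approach only instantiates the classes the decoder explicitly knows about, which is why the export/import path above whitelists Dashboard, Slice, SqlaTable, SqlMetric, and TableColumn rather than deserializing arbitrary objects.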