superset-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From maximebeauche...@apache.org
Subject [incubator-superset] branch master updated: [api] [database] New, migrate to new FAB API side by side (#7665)
Date Tue, 23 Jul 2019 04:25:36 GMT
This is an automated email from the ASF dual-hosted git repository.

maximebeauchemin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new b65ab51  [api] [database] New, migrate to new FAB API side by side (#7665)
b65ab51 is described below

commit b65ab51b63200139947d9ee4b3ba80ef263a3e88
Author: Daniel Vaz Gaspar <danielvazgaspar@gmail.com>
AuthorDate: Tue Jul 23 05:25:16 2019 +0100

    [api] [database] New, migrate to new FAB API side by side (#7665)
    
    * [api] [database] New, migrate to new FAB API side by side
    
    * [database] view and api module reorganization
    
    * [style] Fix, flake8
    
    * [test] [database] Fix, tests for new database view structure
    
    * [style] [database] black
    
    * [database] Fix, missing apache license
    
    * Migrate databaseasync api to the new api
    
    * flake8
    
    * More efficient api call
    
    * Revert querySearch to old API, test
    
    * dummy commit
    
    * revert config.py commit
    
    * Remove unused import
    
    * Remove old API view
    
    * Remove new API view
    
    * Add database filter
    
    * Add database filter
    
    * Add database filter
    
    * Remove comments on js
    
    * Fix JS test
    
    * Fix pylint
    
    * QuerySearch new databaseasync API endpoint
    
    * Fix, query search new api endpoint
    
    * Reintroduce old API for smooth side by side migration
    
    * Better naming and use common hooks for view and API
    
    * black it
    
    * Fix, database mixin must come first
    
    * Fix, lint
---
 .../assets/src/SqlLab/components/QuerySearch.jsx   |   2 +-
 superset/assets/src/components/TableSelector.jsx   |   9 +-
 superset/forms.py                                  | 179 +----------
 superset/views/core.py                             | 352 +--------------------
 superset/views/database/__init__.py                | 228 +++++++++++++
 superset/views/database/api.py                     |  55 ++++
 superset/{ => views/database}/forms.py             |  29 +-
 superset/views/database/views.py                   | 181 +++++++++++
 tests/core_tests.py                                |   2 +-
 tests/security_tests.py                            |   2 +-
 10 files changed, 481 insertions(+), 558 deletions(-)

diff --git a/superset/assets/src/SqlLab/components/QuerySearch.jsx b/superset/assets/src/SqlLab/components/QuerySearch.jsx
index 988f6e3..fcf2f6f 100644
--- a/superset/assets/src/SqlLab/components/QuerySearch.jsx
+++ b/superset/assets/src/SqlLab/components/QuerySearch.jsx
@@ -207,7 +207,7 @@ class QuerySearch extends React.PureComponent {
           <div className="col-sm-2">
             <AsyncSelect
               onChange={this.onChange}
-              dataEndpoint="/databaseasync/api/read?_flt_0_expose_in_sqllab=1"
+              dataEndpoint="/api/v1/database/?q=(filters:!((col:expose_in_sqllab,opr:eq,value:!t)))"
               value={this.state.databaseId}
               mutator={this.dbMutator}
               placeholder={t('Filter by database')}
diff --git a/superset/assets/src/components/TableSelector.jsx b/superset/assets/src/components/TableSelector.jsx
index b7783dd..7d5d65e 100644
--- a/superset/assets/src/components/TableSelector.jsx
+++ b/superset/assets/src/components/TableSelector.jsx
@@ -215,10 +215,11 @@ export default class TableSelector extends React.PureComponent {
     return this.renderSelectRow(
       <AsyncSelect
         dataEndpoint={
-          '/databaseasync/api/' +
-          'read?_flt_0_expose_in_sqllab=1&' +
-          '_oc_DatabaseAsync=database_name&' +
-          '_od_DatabaseAsync=asc'
+          '/api/v1/database/?q=' +
+          '(keys:!(none),' +
+          'columns:!(id,database_name,backend),' +
+          'filters:!((col:expose_in_sqllab,opr:eq,value:!t)),' +
+          'order_columns:database_name,order_direction:asc)'
         }
         onChange={this.onDatabaseChange}
         onAsyncError={() => this.props.handleError(t('Error while fetching database list'))}
diff --git a/superset/forms.py b/superset/forms.py
index 302f466..c11bf7b 100644
--- a/superset/forms.py
+++ b/superset/forms.py
@@ -17,15 +17,9 @@
 # pylint: disable=C,R,W
 """Contains the logic to create cohesive forms on the explore view"""
 from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
-from flask_appbuilder.forms import DynamicForm
-from flask_babel import lazy_gettext as _
-from flask_wtf.file import FileAllowed, FileField, FileRequired
-from wtforms import BooleanField, Field, IntegerField, SelectField, StringField
-from wtforms.ext.sqlalchemy.fields import QuerySelectField
-from wtforms.validators import DataRequired, Length, NumberRange, Optional
+from wtforms import Field
 
-from superset import app, db, security_manager
-from superset.models import core as models
+from superset import app
 
 config = app.config
 
@@ -54,172 +48,3 @@ def filter_not_empty_values(value):
     if not data:
         return None
     return data
-
-
-class CsvToDatabaseForm(DynamicForm):
-    # pylint: disable=E0211
-    def csv_allowed_dbs():
-        csv_allowed_dbs = []
-        csv_enabled_dbs = (
-            db.session.query(models.Database).filter_by(allow_csv_upload=True).all()
-        )
-        for csv_enabled_db in csv_enabled_dbs:
-            if CsvToDatabaseForm.at_least_one_schema_is_allowed(csv_enabled_db):
-                csv_allowed_dbs.append(csv_enabled_db)
-        return csv_allowed_dbs
-
-    @staticmethod
-    def at_least_one_schema_is_allowed(database):
-        """
-        If the user has access to the database or all datasource
-            1. if schemas_allowed_for_csv_upload is empty
-                a) if database does not support schema
-                    user is able to upload csv without specifying schema name
-                b) if database supports schema
-                    user is able to upload csv to any schema
-            2. if schemas_allowed_for_csv_upload is not empty
-                a) if database does not support schema
-                    This situation is impossible and upload will fail
-                b) if database supports schema
-                    user is able to upload to schema in schemas_allowed_for_csv_upload
-        elif the user does not access to the database or all datasource
-            1. if schemas_allowed_for_csv_upload is empty
-                a) if database does not support schema
-                    user is unable to upload csv
-                b) if database supports schema
-                    user is unable to upload csv
-            2. if schemas_allowed_for_csv_upload is not empty
-                a) if database does not support schema
-                    This situation is impossible and user is unable to upload csv
-                b) if database supports schema
-                    user is able to upload to schema in schemas_allowed_for_csv_upload
-        """
-        if (
-            security_manager.database_access(database)
-            or security_manager.all_datasource_access()
-        ):
-            return True
-        schemas = database.get_schema_access_for_csv_upload()
-        if schemas and security_manager.schemas_accessible_by_user(
-            database, schemas, False
-        ):
-            return True
-        return False
-
-    name = StringField(
-        _("Table Name"),
-        description=_("Name of table to be created from csv data."),
-        validators=[DataRequired()],
-        widget=BS3TextFieldWidget(),
-    )
-    csv_file = FileField(
-        _("CSV File"),
-        description=_("Select a CSV file to be uploaded to a database."),
-        validators=[FileRequired(), FileAllowed(["csv"], _("CSV Files Only!"))],
-    )
-    con = QuerySelectField(
-        _("Database"),
-        query_factory=csv_allowed_dbs,
-        get_pk=lambda a: a.id,
-        get_label=lambda a: a.database_name,
-    )
-    schema = StringField(
-        _("Schema"),
-        description=_("Specify a schema (if database flavor supports this)."),
-        validators=[Optional()],
-        widget=BS3TextFieldWidget(),
-    )
-    sep = StringField(
-        _("Delimiter"),
-        description=_("Delimiter used by CSV file (for whitespace use \\s+)."),
-        validators=[DataRequired()],
-        widget=BS3TextFieldWidget(),
-    )
-    if_exists = SelectField(
-        _("Table Exists"),
-        description=_(
-            "If table exists do one of the following: "
-            "Fail (do nothing), Replace (drop and recreate table) "
-            "or Append (insert data)."
-        ),
-        choices=[
-            ("fail", _("Fail")),
-            ("replace", _("Replace")),
-            ("append", _("Append")),
-        ],
-        validators=[DataRequired()],
-    )
-    header = IntegerField(
-        _("Header Row"),
-        description=_(
-            "Row containing the headers to use as "
-            "column names (0 is first line of data). "
-            "Leave empty if there is no header row."
-        ),
-        validators=[Optional(), NumberRange(min=0)],
-        widget=BS3TextFieldWidget(),
-    )
-    index_col = IntegerField(
-        _("Index Column"),
-        description=_(
-            "Column to use as the row labels of the "
-            "dataframe. Leave empty if no index column."
-        ),
-        validators=[Optional(), NumberRange(min=0)],
-        widget=BS3TextFieldWidget(),
-    )
-    mangle_dupe_cols = BooleanField(
-        _("Mangle Duplicate Columns"),
-        description=_('Specify duplicate columns as "X.0, X.1".'),
-    )
-    skipinitialspace = BooleanField(
-        _("Skip Initial Space"), description=_("Skip spaces after delimiter.")
-    )
-    skiprows = IntegerField(
-        _("Skip Rows"),
-        description=_("Number of rows to skip at start of file."),
-        validators=[Optional(), NumberRange(min=0)],
-        widget=BS3TextFieldWidget(),
-    )
-    nrows = IntegerField(
-        _("Rows to Read"),
-        description=_("Number of rows of file to read."),
-        validators=[Optional(), NumberRange(min=0)],
-        widget=BS3TextFieldWidget(),
-    )
-    skip_blank_lines = BooleanField(
-        _("Skip Blank Lines"),
-        description=_(
-            "Skip blank lines rather than interpreting them " "as NaN values."
-        ),
-    )
-    parse_dates = CommaSeparatedListField(
-        _("Parse Dates"),
-        description=_(
-            "A comma separated list of columns that should be " "parsed as dates."
-        ),
-        filters=[filter_not_empty_values],
-    )
-    infer_datetime_format = BooleanField(
-        _("Infer Datetime Format"),
-        description=_("Use Pandas to interpret the datetime format " "automatically."),
-    )
-    decimal = StringField(
-        _("Decimal Character"),
-        default=".",
-        description=_("Character to interpret as decimal point."),
-        validators=[Optional(), Length(min=1, max=1)],
-        widget=BS3TextFieldWidget(),
-    )
-    index = BooleanField(
-        _("Dataframe Index"), description=_("Write dataframe index as a column.")
-    )
-    index_label = StringField(
-        _("Column Label(s)"),
-        description=_(
-            "Column label for index column(s). If None is given "
-            "and Dataframe Index is True, Index Names are used."
-        ),
-        validators=[Optional()],
-        widget=BS3TextFieldWidget(),
-    )
diff --git a/superset/views/core.py b/superset/views/core.py
index 0eece78..c1bb285 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -17,9 +17,7 @@
 # pylint: disable=C,R,W
 from contextlib import closing
 from datetime import datetime, timedelta
-import inspect
 import logging
-import os
 import re
 from typing import Dict, List  # noqa: F401
 from urllib import parse
@@ -35,7 +33,7 @@ from flask import (
     Response,
     url_for,
 )
-from flask_appbuilder import expose, SimpleFormView
+from flask_appbuilder import expose
 from flask_appbuilder.actions import action
 from flask_appbuilder.models.sqla.interface import SQLAInterface
 from flask_appbuilder.security.decorators import has_access, has_access_api
@@ -44,10 +42,8 @@ from flask_babel import gettext as __
 from flask_babel import lazy_gettext as _
 import pandas as pd
 import simplejson as json
-from sqlalchemy import and_, MetaData, or_, select
-from sqlalchemy.exc import IntegrityError
+from sqlalchemy import and_, or_, select
 from werkzeug.routing import BaseConverter
-from werkzeug.utils import secure_filename
 
 from superset import (
     app,
@@ -63,13 +59,12 @@ from superset import (
     viz,
 )
 from superset.connectors.connector_registry import ConnectorRegistry
-from superset.connectors.sqla.models import AnnotationDatasource, SqlaTable
+from superset.connectors.sqla.models import AnnotationDatasource
 from superset.exceptions import (
     DatabaseNotFound,
     SupersetException,
     SupersetSecurityException,
 )
-from superset.forms import CsvToDatabaseForm
 from superset.jinja_context import get_template_processor
 from superset.legacy import update_time_range
 import superset.models.core as models
@@ -96,7 +91,6 @@ from .base import (
     json_success,
     SupersetFilter,
     SupersetModelView,
-    YamlExportMixin,
 )
 from .utils import (
     apply_display_max_row_limit,
@@ -200,14 +194,6 @@ class SliceFilter(SupersetFilter):
         return query.filter(self.model.perm.in_(perms))
 
 
-class DatabaseFilter(SupersetFilter):
-    def apply(self, query, func):  # noqa
-        if security_manager.all_database_access():
-            return query
-        perms = self.get_view_menus("database_access")
-        return query.filter(self.model.perm.in_(perms))
-
-
 class DashboardFilter(SupersetFilter):
     """
     List dashboards with the following criteria:
@@ -266,336 +252,8 @@ class DashboardFilter(SupersetFilter):
         return query
 
 
-class DatabaseView(SupersetModelView, DeleteMixin, YamlExportMixin):  # noqa
-    datamodel = SQLAInterface(models.Database)
-
-    list_title = _("Databases")
-    show_title = _("Show Database")
-    add_title = _("Add Database")
-    edit_title = _("Edit Database")
-
-    list_columns = [
-        "database_name",
-        "backend",
-        "allow_run_async",
-        "allow_dml",
-        "allow_csv_upload",
-        "expose_in_sqllab",
-        "creator",
-        "modified",
-    ]
-    order_columns = [
-        "database_name",
-        "allow_run_async",
-        "allow_dml",
-        "modified",
-        "allow_csv_upload",
-        "expose_in_sqllab",
-    ]
-    add_columns = [
-        "database_name",
-        "sqlalchemy_uri",
-        "cache_timeout",
-        "expose_in_sqllab",
-        "allow_run_async",
-        "allow_csv_upload",
-        "allow_ctas",
-        "allow_dml",
-        "force_ctas_schema",
-        "impersonate_user",
-        "allow_multi_schema_metadata_fetch",
-        "extra",
-    ]
-    search_exclude_columns = (
-        "password",
-        "tables",
-        "created_by",
-        "changed_by",
-        "queries",
-        "saved_queries",
-    )
-    edit_columns = add_columns
-    show_columns = [
-        "tables",
-        "cache_timeout",
-        "extra",
-        "database_name",
-        "sqlalchemy_uri",
-        "perm",
-        "created_by",
-        "created_on",
-        "changed_by",
-        "changed_on",
-    ]
-    add_template = "superset/models/database/add.html"
-    edit_template = "superset/models/database/edit.html"
-    base_order = ("changed_on", "desc")
-    description_columns = {
-        "sqlalchemy_uri": utils.markdown(
-            "Refer to the "
-            "[SqlAlchemy docs]"
-            "(https://docs.sqlalchemy.org/en/rel_1_2/core/engines.html#"
-            "database-urls) "
-            "for more information on how to structure your URI.",
-            True,
-        ),
-        "expose_in_sqllab": _("Expose this DB in SQL Lab"),
-        "allow_run_async": _(
-            "Operate the database in asynchronous mode, meaning  "
-            "that the queries are executed on remote workers as opposed "
-            "to on the web server itself. "
-            "This assumes that you have a Celery worker setup as well "
-            "as a results backend. Refer to the installation docs "
-            "for more information."
-        ),
-        "allow_ctas": _("Allow CREATE TABLE AS option in SQL Lab"),
-        "allow_dml": _(
-            "Allow users to run non-SELECT statements "
-            "(UPDATE, DELETE, CREATE, ...) "
-            "in SQL Lab"
-        ),
-        "force_ctas_schema": _(
-            "When allowing CREATE TABLE AS option in SQL Lab, "
-            "this option forces the table to be created in this schema"
-        ),
-        "extra": utils.markdown(
-            "JSON string containing extra configuration elements.<br/>"
-            "1. The ``engine_params`` object gets unpacked into the "
-            "[sqlalchemy.create_engine]"
-            "(https://docs.sqlalchemy.org/en/latest/core/engines.html#"
-            "sqlalchemy.create_engine) call, while the ``metadata_params`` "
-            "gets unpacked into the [sqlalchemy.MetaData]"
-            "(https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html"
-            "#sqlalchemy.schema.MetaData) call.<br/>"
-            "2. The ``metadata_cache_timeout`` is a cache timeout setting "
-            "in seconds for metadata fetch of this database. Specify it as "
-            '**"metadata_cache_timeout": {"schema_cache_timeout": 600, '
-            '"table_cache_timeout": 600}**. '
-            "If unset, cache will not be enabled for the functionality. "
-            "A timeout of 0 indicates that the cache never expires.<br/>"
-            "3. The ``schemas_allowed_for_csv_upload`` is a comma separated list "
-            "of schemas that CSVs are allowed to upload to. "
-            'Specify it as **"schemas_allowed_for_csv_upload": '
-            '["public", "csv_upload"]**. '
-            "If database flavor does not support schema or any schema is allowed "
-            "to be accessed, just leave the list empty",
-            True,
-        ),
-        "impersonate_user": _(
-            "If Presto, all the queries in SQL Lab are going to be executed as the "
-            "currently logged on user who must have permission to run them.<br/>"
-            "If Hive and hive.server2.enable.doAs is enabled, will run the queries as "
-            "service account, but impersonate the currently logged on user "
-            "via hive.server2.proxy.user property."
-        ),
-        "allow_multi_schema_metadata_fetch": _(
-            "Allow SQL Lab to fetch a list of all tables and all views across "
-            "all database schemas. For large data warehouse with thousands of "
-            "tables, this can be expensive and put strain on the system."
-        ),
-        "cache_timeout": _(
-            "Duration (in seconds) of the caching timeout for charts of this database. "
-            "A timeout of 0 indicates that the cache never expires. "
-            "Note this defaults to the global timeout if undefined."
-        ),
-        "allow_csv_upload": _(
-            "If selected, please set the schemas allowed for csv upload in Extra."
-        ),
-    }
-    base_filters = [["id", DatabaseFilter, lambda: []]]
-    label_columns = {
-        "expose_in_sqllab": _("Expose in SQL Lab"),
-        "allow_ctas": _("Allow CREATE TABLE AS"),
-        "allow_dml": _("Allow DML"),
-        "force_ctas_schema": _("CTAS Schema"),
-        "database_name": _("Database"),
-        "creator": _("Creator"),
-        "changed_on_": _("Last Changed"),
-        "sqlalchemy_uri": _("SQLAlchemy URI"),
-        "cache_timeout": _("Chart Cache Timeout"),
-        "extra": _("Extra"),
-        "allow_run_async": _("Asynchronous Query Execution"),
-        "impersonate_user": _("Impersonate the logged on user"),
-        "allow_csv_upload": _("Allow Csv Upload"),
-        "modified": _("Modified"),
-        "allow_multi_schema_metadata_fetch": _("Allow Multi Schema Metadata Fetch"),
-        "backend": _("Backend"),
-    }
-
-    def pre_add(self, db):
-        self.check_extra(db)
-        db.set_sqlalchemy_uri(db.sqlalchemy_uri)
-        security_manager.add_permission_view_menu("database_access", db.perm)
-        # adding a new database we always want to force refresh schema list
-        for schema in db.get_all_schema_names():
-            security_manager.add_permission_view_menu(
-                "schema_access", security_manager.get_schema_perm(db, schema)
-            )
-
-    def pre_update(self, db):
-        self.pre_add(db)
-
-    def pre_delete(self, obj):
-        if obj.tables:
-            raise SupersetException(
-                Markup(
-                    "Cannot delete a database that has tables attached. "
-                    "Here's the list of associated tables: "
-                    + ", ".join("{}".format(o) for o in obj.tables)
-                )
-            )
-
-    def _delete(self, pk):
-        DeleteMixin._delete(self, pk)
-
-    def check_extra(self, db):
-        # this will check whether json.loads(extra) can succeed
-        try:
-            extra = db.get_extra()
-        except Exception as e:
-            raise Exception("Extra field cannot be decoded by JSON. {}".format(str(e)))
-
-        # this will check whether 'metadata_params' is configured correctly
-        metadata_signature = inspect.signature(MetaData)
-        for key in extra.get("metadata_params", {}):
-            if key not in metadata_signature.parameters:
-                raise Exception(
-                    "The metadata_params in Extra field "
-                    "is not configured correctly. The key "
-                    "{} is invalid.".format(key)
-                )
-
-
-appbuilder.add_link(
-    "Import Dashboards",
-    label=__("Import Dashboards"),
-    href="/superset/import_dashboards",
-    icon="fa-cloud-upload",
-    category="Manage",
-    category_label=__("Manage"),
-    category_icon="fa-wrench",
-)
-
-
-appbuilder.add_view(
-    DatabaseView,
-    "Databases",
-    label=__("Databases"),
-    icon="fa-database",
-    category="Sources",
-    category_label=__("Sources"),
-    category_icon="fa-database",
-)
-
-
-class DatabaseAsync(DatabaseView):
-    list_columns = [
-        "id",
-        "database_name",
-        "expose_in_sqllab",
-        "allow_ctas",
-        "force_ctas_schema",
-        "allow_run_async",
-        "allow_dml",
-        "allow_multi_schema_metadata_fetch",
-        "allow_csv_upload",
-        "allows_subquery",
-        "backend",
-    ]
-
-
-appbuilder.add_view_no_menu(DatabaseAsync)
-
-
-class CsvToDatabaseView(SimpleFormView):
-    form = CsvToDatabaseForm
-    form_template = "superset/form_view/csv_to_database_view/edit.html"
-    form_title = _("CSV to Database configuration")
-    add_columns = ["database", "schema", "table_name"]
-
-    def form_get(self, form):
-        form.sep.data = ","
-        form.header.data = 0
-        form.mangle_dupe_cols.data = True
-        form.skipinitialspace.data = False
-        form.skip_blank_lines.data = True
-        form.infer_datetime_format.data = True
-        form.decimal.data = "."
-        form.if_exists.data = "fail"
-
-    def form_post(self, form):
-        database = form.con.data
-        schema_name = form.schema.data or ""
-
-        if not self.is_schema_allowed(database, schema_name):
-            message = _(
-                'Database "{0}" Schema "{1}" is not allowed for csv uploads. '
-                "Please contact Superset Admin".format(
-                    database.database_name, schema_name
-                )
-            )
-            flash(message, "danger")
-            return redirect("/csvtodatabaseview/form")
-
-        csv_file = form.csv_file.data
-        form.csv_file.data.filename = secure_filename(form.csv_file.data.filename)
-        csv_filename = form.csv_file.data.filename
-        path = os.path.join(config["UPLOAD_FOLDER"], csv_filename)
-        try:
-            utils.ensure_path_exists(config["UPLOAD_FOLDER"])
-            csv_file.save(path)
-            table = SqlaTable(table_name=form.name.data)
-            table.database = form.data.get("con")
-            table.database_id = table.database.id
-            table.database.db_engine_spec.create_table_from_csv(form, table)
-        except Exception as e:
-            try:
-                os.remove(path)
-            except OSError:
-                pass
-            message = (
-                "Table name {} already exists. Please pick another".format(
-                    form.name.data
-                )
-                if isinstance(e, IntegrityError)
-                else str(e)
-            )
-            flash(message, "danger")
-            stats_logger.incr("failed_csv_upload")
-            return redirect("/csvtodatabaseview/form")
-
-        os.remove(path)
-        # Go back to welcome page / splash screen
-        db_name = table.database.database_name
-        message = _(
-            'CSV file "{0}" uploaded to table "{1}" in '
-            'database "{2}"'.format(csv_filename, form.name.data, db_name)
-        )
-        flash(message, "info")
-        stats_logger.incr("successful_csv_upload")
-        return redirect("/tablemodelview/list/")
-
-    def is_schema_allowed(self, database, schema):
-        if not database.allow_csv_upload:
-            return False
-        schemas = database.get_schema_access_for_csv_upload()
-        if schemas:
-            return schema in schemas
-        return (
-            security_manager.database_access(database)
-            or security_manager.all_datasource_access()
-        )
-
-
-appbuilder.add_view_no_menu(CsvToDatabaseView)
-
-
-class DatabaseTablesAsync(DatabaseView):
-    list_columns = ["id", "all_table_names_in_database", "all_schema_names"]
-
-
-appbuilder.add_view_no_menu(DatabaseTablesAsync)
-
+from .database import api as database_api  # noqa
+from .database import views as in_views  # noqa
 
 if config.get("ENABLE_ACCESS_REQUEST"):
 
diff --git a/superset/views/database/__init__.py b/superset/views/database/__init__.py
new file mode 100644
index 0000000..15b0d52
--- /dev/null
+++ b/superset/views/database/__init__.py
@@ -0,0 +1,228 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# pylint: disable=C,R,W
+import inspect
+
+from flask import Markup
+from flask_babel import lazy_gettext as _
+from sqlalchemy import MetaData
+
+from superset import security_manager
+from superset.exceptions import SupersetException
+from superset.utils import core as utils
+from superset.views.base import SupersetFilter
+
+
+class DatabaseFilter(SupersetFilter):
+    def apply(self, query, func):  # noqa
+        if security_manager.all_database_access():
+            return query
+        perms = self.get_view_menus("database_access")
+        return query.filter(self.model.perm.in_(perms))
+
+
+class DatabaseMixin:  # noqa
+    list_title = _("Databases")
+    show_title = _("Show Database")
+    add_title = _("Add Database")
+    edit_title = _("Edit Database")
+
+    list_columns = [
+        "database_name",
+        "backend",
+        "allow_run_async",
+        "allow_dml",
+        "allow_csv_upload",
+        "expose_in_sqllab",
+        "creator",
+        "modified",
+    ]
+    order_columns = [
+        "database_name",
+        "allow_run_async",
+        "allow_dml",
+        "modified",
+        "allow_csv_upload",
+        "expose_in_sqllab",
+    ]
+    add_columns = [
+        "database_name",
+        "sqlalchemy_uri",
+        "cache_timeout",
+        "expose_in_sqllab",
+        "allow_run_async",
+        "allow_csv_upload",
+        "allow_ctas",
+        "allow_dml",
+        "force_ctas_schema",
+        "impersonate_user",
+        "allow_multi_schema_metadata_fetch",
+        "extra",
+    ]
+    search_exclude_columns = (
+        "password",
+        "tables",
+        "created_by",
+        "changed_by",
+        "queries",
+        "saved_queries",
+    )
+    edit_columns = add_columns
+    show_columns = [
+        "tables",
+        "cache_timeout",
+        "extra",
+        "database_name",
+        "sqlalchemy_uri",
+        "perm",
+        "created_by",
+        "created_on",
+        "changed_by",
+        "changed_on",
+    ]
+    base_order = ("changed_on", "desc")
+    description_columns = {
+        "sqlalchemy_uri": utils.markdown(
+            "Refer to the "
+            "[SqlAlchemy docs]"
+            "(https://docs.sqlalchemy.org/en/rel_1_2/core/engines.html#"
+            "database-urls) "
+            "for more information on how to structure your URI.",
+            True,
+        ),
+        "expose_in_sqllab": _("Expose this DB in SQL Lab"),
+        "allow_run_async": _(
+            "Operate the database in asynchronous mode, meaning  "
+            "that the queries are executed on remote workers as opposed "
+            "to on the web server itself. "
+            "This assumes that you have a Celery worker setup as well "
+            "as a results backend. Refer to the installation docs "
+            "for more information."
+        ),
+        "allow_ctas": _("Allow CREATE TABLE AS option in SQL Lab"),
+        "allow_dml": _(
+            "Allow users to run non-SELECT statements "
+            "(UPDATE, DELETE, CREATE, ...) "
+            "in SQL Lab"
+        ),
+        "force_ctas_schema": _(
+            "When allowing CREATE TABLE AS option in SQL Lab, "
+            "this option forces the table to be created in this schema"
+        ),
+        "extra": utils.markdown(
+            "JSON string containing extra configuration elements.<br/>"
+            "1. The ``engine_params`` object gets unpacked into the "
+            "[sqlalchemy.create_engine]"
+            "(https://docs.sqlalchemy.org/en/latest/core/engines.html#"
+            "sqlalchemy.create_engine) call, while the ``metadata_params`` "
+            "gets unpacked into the [sqlalchemy.MetaData]"
+            "(https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html"
+            "#sqlalchemy.schema.MetaData) call.<br/>"
+            "2. The ``metadata_cache_timeout`` is a cache timeout setting "
+            "in seconds for metadata fetch of this database. Specify it as "
+            '**"metadata_cache_timeout": {"schema_cache_timeout": 600, '
+            '"table_cache_timeout": 600}**. '
+            "If unset, cache will not be enabled for the functionality. "
+            "A timeout of 0 indicates that the cache never expires.<br/>"
+            "3. The ``schemas_allowed_for_csv_upload`` is a comma separated list "
+            "of schemas that CSVs are allowed to upload to. "
+            'Specify it as **"schemas_allowed_for_csv_upload": '
+            '["public", "csv_upload"]**. '
+            "If database flavor does not support schema or any schema is allowed "
+            "to be accessed, just leave the list empty",
+            True,
+        ),
+        "impersonate_user": _(
+            "If Presto, all the queries in SQL Lab are going to be executed as the "
+            "currently logged on user who must have permission to run them.<br/>"
+            "If Hive and hive.server2.enable.doAs is enabled, will run the queries as "
+            "service account, but impersonate the currently logged on user "
+            "via hive.server2.proxy.user property."
+        ),
+        "allow_multi_schema_metadata_fetch": _(
+            "Allow SQL Lab to fetch a list of all tables and all views across "
+            "all database schemas. For large data warehouse with thousands of "
+            "tables, this can be expensive and put strain on the system."
+        ),
+        "cache_timeout": _(
+            "Duration (in seconds) of the caching timeout for charts of this database. "
+            "A timeout of 0 indicates that the cache never expires. "
+            "Note this defaults to the global timeout if undefined."
+        ),
+        "allow_csv_upload": _(
+            "If selected, please set the schemas allowed for csv upload in Extra."
+        ),
+    }
+    # Restrict visible databases via DatabaseFilter (no extra filter args).
+    base_filters = [["id", DatabaseFilter, lambda: []]]
+    # Translatable column labels shared by the list/show/add/edit views.
+    label_columns = {
+        "expose_in_sqllab": _("Expose in SQL Lab"),
+        "allow_ctas": _("Allow CREATE TABLE AS"),
+        "allow_dml": _("Allow DML"),
+        "force_ctas_schema": _("CTAS Schema"),
+        "database_name": _("Database"),
+        "creator": _("Creator"),
+        "changed_on_": _("Last Changed"),
+        "sqlalchemy_uri": _("SQLAlchemy URI"),
+        "cache_timeout": _("Chart Cache Timeout"),
+        "extra": _("Extra"),
+        "allow_run_async": _("Asynchronous Query Execution"),
+        "impersonate_user": _("Impersonate the logged on user"),
+        "allow_csv_upload": _("Allow Csv Upload"),
+        "modified": _("Modified"),
+        "allow_multi_schema_metadata_fetch": _("Allow Multi Schema Metadata Fetch"),
+        "backend": _("Backend"),
+    }
+
+    def pre_add(self, db):
+        """Validate and prepare a Database record before it is inserted.
+
+        Validates the JSON ``extra`` field, stores the SQLAlchemy URI via
+        the model's setter, and registers the ``database_access`` and
+        per-schema ``schema_access`` permission views for the new database.
+        """
+        self.check_extra(db)
+        db.set_sqlalchemy_uri(db.sqlalchemy_uri)
+        security_manager.add_permission_view_menu("database_access", db.perm)
+        # adding a new database we always want to force refresh schema list
+        for schema in db.get_all_schema_names():
+            security_manager.add_permission_view_menu(
+                "schema_access", security_manager.get_schema_perm(db, schema)
+            )
+
+    def pre_update(self, db):
+        """Apply the same validation/permission hooks on update as on insert."""
+        self.pre_add(db)
+
+    def pre_delete(self, obj):
+        if obj.tables:
+            raise SupersetException(
+                Markup(
+                    "Cannot delete a database that has tables attached. "
+                    "Here's the list of associated tables: "
+                    + ", ".join("{}".format(o) for o in obj.tables)
+                )
+            )
+
+    def check_extra(self, db):
+        # this will check whether json.loads(extra) can succeed
+        try:
+            extra = db.get_extra()
+        except Exception as e:
+            raise Exception("Extra field cannot be decoded by JSON. {}".format(str(e)))
+
+        # this will check whether 'metadata_params' is configured correctly
+        metadata_signature = inspect.signature(MetaData)
+        for key in extra.get("metadata_params", {}):
+            if key not in metadata_signature.parameters:
+                raise Exception(
+                    "The metadata_params in Extra field "
+                    "is not configured correctly. The key "
+                    "{} is invalid.".format(key)
+                )
diff --git a/superset/views/database/api.py b/superset/views/database/api.py
new file mode 100644
index 0000000..dea17ba
--- /dev/null
+++ b/superset/views/database/api.py
@@ -0,0 +1,55 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from flask_appbuilder import ModelRestApi
+from flask_appbuilder.models.sqla.interface import SQLAInterface
+
+from superset import appbuilder
+import superset.models.core as models
+from . import DatabaseFilter, DatabaseMixin
+
+
+class DatabaseRestApi(DatabaseMixin, ModelRestApi):
+    """New-style FAB REST API exposing Database records."""
+
+    datamodel = SQLAInterface(models.Database)
+
+    # Map REST methods onto the legacy "DatabaseAsync" view permissions so
+    # existing roles keep working during the side-by-side migration.
+    class_permission_name = "DatabaseAsync"
+    method_permission_name = {
+        "get_list": "list",
+        "get": "show",
+        "post": "add",
+        "put": "edit",
+        "delete": "delete",
+        "info": "list",
+    }
+    resource_name = "database"
+    allow_browser_login = True
+    # Only expose databases the current user is allowed to see.
+    base_filters = [["id", DatabaseFilter, lambda: []]]
+    list_columns = [
+        "id",
+        "database_name",
+        "expose_in_sqllab",
+        "allow_ctas",
+        "force_ctas_schema",
+        "allow_run_async",
+        "allow_dml",
+        "allow_multi_schema_metadata_fetch",
+        "allow_csv_upload",
+        "allows_subquery",
+        "backend",
+    ]
+
+
+appbuilder.add_api(DatabaseRestApi)
diff --git a/superset/forms.py b/superset/views/database/forms.py
similarity index 91%
copy from superset/forms.py
copy to superset/views/database/forms.py
index 302f466..edce025 100644
--- a/superset/forms.py
+++ b/superset/views/database/forms.py
@@ -20,42 +20,17 @@ from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
 from flask_appbuilder.forms import DynamicForm
 from flask_babel import lazy_gettext as _
 from flask_wtf.file import FileAllowed, FileField, FileRequired
-from wtforms import BooleanField, Field, IntegerField, SelectField, StringField
+from wtforms import BooleanField, IntegerField, SelectField, StringField
 from wtforms.ext.sqlalchemy.fields import QuerySelectField
 from wtforms.validators import DataRequired, Length, NumberRange, Optional
 
 from superset import app, db, security_manager
+from superset.forms import CommaSeparatedListField, filter_not_empty_values
 from superset.models import core as models
 
 config = app.config
 
 
-class CommaSeparatedListField(Field):
-    widget = BS3TextFieldWidget()
-
-    def _value(self):
-        if self.data:
-            return u", ".join(self.data)
-        else:
-            return u""
-
-    def process_formdata(self, valuelist):
-        if valuelist:
-            self.data = [x.strip() for x in valuelist[0].split(",")]
-        else:
-            self.data = []
-
-
-def filter_not_empty_values(value):
-    """Returns a list of non empty values or None"""
-    if not value:
-        return None
-    data = [x for x in value if x]
-    if not data:
-        return None
-    return data
-
-
 class CsvToDatabaseForm(DynamicForm):
     # pylint: disable=E0211
     def csv_allowed_dbs():
diff --git a/superset/views/database/views.py b/superset/views/database/views.py
new file mode 100644
index 0000000..19fe490
--- /dev/null
+++ b/superset/views/database/views.py
@@ -0,0 +1,181 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# pylint: disable=C,R,W
+import os
+
+from flask import flash, redirect
+from flask_appbuilder import SimpleFormView
+from flask_appbuilder.models.sqla.interface import SQLAInterface
+from flask_babel import gettext as __
+from flask_babel import lazy_gettext as _
+from sqlalchemy.exc import IntegrityError
+from werkzeug.utils import secure_filename
+
+from superset import app, appbuilder, security_manager
+from superset.connectors.sqla.models import SqlaTable
+import superset.models.core as models
+from superset.utils import core as utils
+from superset.views.base import DeleteMixin, SupersetModelView, YamlExportMixin
+from . import DatabaseMixin
+from .forms import CsvToDatabaseForm
+
+
+config = app.config
+stats_logger = config.get("STATS_LOGGER")
+
+
+class DatabaseView(
+    DatabaseMixin, SupersetModelView, DeleteMixin, YamlExportMixin
+):  # noqa
+    """CRUD web view for Database models (shared config from DatabaseMixin)."""
+
+    datamodel = SQLAInterface(models.Database)
+
+    add_template = "superset/models/database/add.html"
+    edit_template = "superset/models/database/edit.html"
+
+    def _delete(self, pk):
+        # Explicitly route to DeleteMixin's implementation rather than
+        # relying on MRO across the multiple mixins.
+        DeleteMixin._delete(self, pk)
+
+
+# Menu entry linking to the dashboard import endpoint under "Manage".
+appbuilder.add_link(
+    "Import Dashboards",
+    label=__("Import Dashboards"),
+    href="/superset/import_dashboards",
+    icon="fa-cloud-upload",
+    category="Manage",
+    category_label=__("Manage"),
+    category_icon="fa-wrench",
+)
+
+
+# Register the Databases CRUD view under the "Sources" menu.
+appbuilder.add_view(
+    DatabaseView,
+    "Databases",
+    label=__("Databases"),
+    icon="fa-database",
+    category="Sources",
+    category_label=__("Sources"),
+    category_icon="fa-database",
+)
+
+
+class CsvToDatabaseView(SimpleFormView):
+    form = CsvToDatabaseForm
+    form_template = "superset/form_view/csv_to_database_view/edit.html"
+    form_title = _("CSV to Database configuration")
+    add_columns = ["database", "schema", "table_name"]
+
+    def form_get(self, form):
+        form.sep.data = ","
+        form.header.data = 0
+        form.mangle_dupe_cols.data = True
+        form.skipinitialspace.data = False
+        form.skip_blank_lines.data = True
+        form.infer_datetime_format.data = True
+        form.decimal.data = "."
+        form.if_exists.data = "fail"
+
+    def form_post(self, form):
+        database = form.con.data
+        schema_name = form.schema.data or ""
+
+        if not self.is_schema_allowed(database, schema_name):
+            message = _(
+                'Database "{0}" Schema "{1}" is not allowed for csv uploads. '
+                "Please contact Superset Admin".format(
+                    database.database_name, schema_name
+                )
+            )
+            flash(message, "danger")
+            return redirect("/csvtodatabaseview/form")
+
+        csv_file = form.csv_file.data
+        form.csv_file.data.filename = secure_filename(form.csv_file.data.filename)
+        csv_filename = form.csv_file.data.filename
+        path = os.path.join(config["UPLOAD_FOLDER"], csv_filename)
+        try:
+            utils.ensure_path_exists(config["UPLOAD_FOLDER"])
+            csv_file.save(path)
+            table = SqlaTable(table_name=form.name.data)
+            table.database = form.data.get("con")
+            table.database_id = table.database.id
+            table.database.db_engine_spec.create_table_from_csv(form, table)
+        except Exception as e:
+            try:
+                os.remove(path)
+            except OSError:
+                pass
+            message = (
+                "Table name {} already exists. Please pick another".format(
+                    form.name.data
+                )
+                if isinstance(e, IntegrityError)
+                else str(e)
+            )
+            flash(message, "danger")
+            stats_logger.incr("failed_csv_upload")
+            return redirect("/csvtodatabaseview/form")
+
+        os.remove(path)
+        # Go back to welcome page / splash screen
+        db_name = table.database.database_name
+        message = _(
+            'CSV file "{0}" uploaded to table "{1}" in '
+            'database "{2}"'.format(csv_filename, form.name.data, db_name)
+        )
+        flash(message, "info")
+        stats_logger.incr("successful_csv_upload")
+        return redirect("/tablemodelview/list/")
+
+    def is_schema_allowed(self, database, schema):
+        if not database.allow_csv_upload:
+            return False
+        schemas = database.get_schema_access_for_csv_upload()
+        if schemas:
+            return schema in schemas
+        return (
+            security_manager.database_access(database)
+            or security_manager.all_datasource_access()
+        )
+
+
+appbuilder.add_view_no_menu(CsvToDatabaseView)
+
+
+class DatabaseTablesAsync(DatabaseView):
+    """List view restricted to table/schema name columns (no menu entry)."""
+
+    list_columns = ["id", "all_table_names_in_database", "all_schema_names"]
+
+
+appbuilder.add_view_no_menu(DatabaseTablesAsync)
+
+
+class DatabaseAsync(DatabaseView):
+    """Legacy no-menu list view kept during the side-by-side API migration.
+
+    NOTE(review): this column list duplicates DatabaseRestApi.list_columns
+    in api.py; keep the two in sync until the old view is removed.
+    """
+
+    list_columns = [
+        "id",
+        "database_name",
+        "expose_in_sqllab",
+        "allow_ctas",
+        "force_ctas_schema",
+        "allow_run_async",
+        "allow_dml",
+        "allow_multi_schema_metadata_fetch",
+        "allow_csv_upload",
+        "allows_subquery",
+        "backend",
+    ]
+
+
+appbuilder.add_view_no_menu(DatabaseAsync)
diff --git a/tests/core_tests.py b/tests/core_tests.py
index b401c8c..2cb2d91 100644
--- a/tests/core_tests.py
+++ b/tests/core_tests.py
@@ -39,7 +39,7 @@ from superset.db_engine_specs.mssql import MssqlEngineSpec
 from superset.models import core as models
 from superset.models.sql_lab import Query
 from superset.utils import core as utils
-from superset.views.core import DatabaseView
+from superset.views.database.views import DatabaseView
 from .base_tests import SupersetTestCase
 from .fixtures.pyodbcRow import Row
 
diff --git a/tests/security_tests.py b/tests/security_tests.py
index 3b210a1..36046e8 100644
--- a/tests/security_tests.py
+++ b/tests/security_tests.py
@@ -98,7 +98,7 @@ class RolePermissionTests(SupersetTestCase):
         self.assert_cannot_write("UserDBModelView", perm_set)
 
     def assert_can_admin(self, perm_set):
-        self.assert_can_all("DatabaseAsync", perm_set)
+        self.assert_can_read("DatabaseAsync", perm_set)
         self.assert_can_all("DatabaseView", perm_set)
         self.assert_can_all("DruidClusterModelView", perm_set)
         self.assert_can_all("RoleModelView", perm_set)


Mime
View raw message