Linting
Fabian committed Dec 4, 2017
1 parent 20a1fd0 commit bc4f9ac
Showing 11 changed files with 85 additions and 83 deletions.
35 changes: 18 additions & 17 deletions superset/cli.py
@@ -4,16 +4,16 @@
 from __future__ import print_function
 from __future__ import unicode_literals

+import logging
 from datetime import datetime
-from pathlib2 import Path
-import logging
 from subprocess import Popen
 from sys import stdout
-import yaml

 from colorama import Fore, Style
 from flask_migrate import MigrateCommand
 from flask_script import Manager
+from pathlib2 import Path
+import yaml

 from superset import app, db, dict_import_export_util, security, utils
@@ -183,15 +183,15 @@ def refresh_druid(datasource, merge):

 @manager.option(
     '-p', '--path', dest='path',
-    help='Path to a single YAML file or path containing multiple YAML '
+    help='Path to a single YAML file or path containing multiple YAML '
          'files to import (*.yaml or *.yml)')
 @manager.option(
     '-s', '--sync', dest='sync', default='',
     help='comma seperated list of element types to synchronize '
          'e.g. "metrics,columns" deletes metrics and columns in the DB '
          'that are not specified in the YAML file')
 @manager.option(
-    '-r', '--recursive', dest='recursive', action="store_true",
+    '-r', '--recursive', dest='recursive', action='store_true',
     help='recursively search the path for yaml files')
 def import_datasources(path, sync, recursive=False):
     """Import datasources from YAML"""
@@ -207,29 +207,30 @@ def import_datasources(path, sync, recursive=False):
         files.extend(p.rglob('*.yaml'))
         files.extend(p.rglob('*.yml'))
     for f in files:
-        logging.info("Importing datasources from file %s", f)
+        logging.info('Importing datasources from file %s', f)
         try:
-            with f.open() as data_stream:
-                dict_import_export_util.import_from_dict(db.session,
-                    yaml.load(data_stream),
-                    sync=sync_array)
+            with f.open() as data_stream:
+                dict_import_export_util.import_from_dict(
+                    db.session,
+                    yaml.load(data_stream),
+                    sync=sync_array)
         except Exception as e:
-            logging.error("Error when importing datasources from file %s", f)
+            logging.error('Error when importing datasources from file %s', f)
             logging.error(e)


 @manager.option(
     '-f', '--datasource-file', default=None, dest='datasource_file',
-    help="Specify the the file to export to")
+    help='Specify the the file to export to')
 @manager.option(
     '-p', '--print', action='store_true', dest='print_stdout',
-    help="Print YAML to stdout")
+    help='Print YAML to stdout')
 @manager.option(
     '-b', '--back-references', action='store_true', dest='back_references',
-    help="Include parent back references")
+    help='Include parent back references')
 @manager.option(
     '-d', '--include-defaults', action='store_true', dest='include_defaults',
-    help="Include fields containing defaults")
+    help='Include fields containing defaults')
 def export_datasources(print_stdout, datasource_file,
                        back_references, include_defaults):
     """Export datasources to YAML"""
@@ -241,14 +241,14 @@ def export_datasources(print_stdout, datasource_file,
     if print_stdout or not datasource_file:
         yaml.safe_dump(data, stdout, default_flow_style=False)
     if datasource_file:
-        logging.info("Exporting datasources to %s", datasource_file)
+        logging.info('Exporting datasources to %s', datasource_file)
         with open(datasource_file, 'w') as data_stream:
             yaml.safe_dump(data, data_stream, default_flow_style=False)


 @manager.option(
     '-b', '--back-references', action='store_false',
-    help="Include parent back references")
+    help='Include parent back references')
 def export_datasource_schema(back_references):
     """Export datasource YAML schema to stdout"""
     data = dict_import_export_util.export_schema_to_dict(
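Note: the import path these options drive can also be exercised programmatically. A minimal sketch, assuming an initialized Superset app context and a hypothetical datasources/ directory; it mirrors the command body above, where an empty sync list deletes nothing and e.g. ['metrics', 'columns'] reproduces --sync:

    from pathlib2 import Path
    import yaml

    from superset import db, dict_import_export_util

    # Gather YAML files the way the command does when --recursive is set.
    files = []
    path = Path('datasources/')  # hypothetical example directory
    files.extend(path.rglob('*.yaml'))
    files.extend(path.rglob('*.yml'))

    for f in files:
        with f.open() as data_stream:
            dict_import_export_util.import_from_dict(
                db.session,
                yaml.load(data_stream),
                sync=[])  # [] deletes nothing; ['metrics', 'columns'] mirrors --sync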
5 changes: 2 additions & 3 deletions superset/connectors/druid/models.py
@@ -6,7 +6,6 @@
 import logging
 from multiprocessing.pool import ThreadPool

-from sqlalchemy.schema import UniqueConstraint
 from dateutil.parser import parse as dparse
 from flask import escape, Markup
 from flask_appbuilder import Model
@@ -30,7 +29,7 @@
 from superset import conf, db, import_util, sm, utils
 from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
 from superset.models.helpers import (
-    AuditMixinNullable, ImportMixin, QueryResult, set_perm
+    AuditMixinNullable, ImportMixin, QueryResult, set_perm,
 )
 from superset.utils import (
     DimSelector, DTTM_ALIAS, flasher, MetricPermException,
@@ -242,7 +241,7 @@ class DruidColumn(Model, BaseColumn):
     export_fields = (
         'datasource_id', 'column_name', 'is_active', 'type', 'groupby',
         'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
-        'description', 'dimension_spec_json', 'verbose_name'
+        'description', 'dimension_spec_json', 'verbose_name',
     )
     export_parent = 'datasource'

2 changes: 1 addition & 1 deletion superset/connectors/druid/views.py
@@ -14,7 +14,7 @@
 from superset.views.base import (
     BaseSupersetView, DatasourceFilter, DeleteMixin,
     get_datasource_exist_error_mgs, ListWidgetWithCheckboxes, SupersetModelView,
-    validate_json, YamlExportMixin
+    validate_json, YamlExportMixin,
 )
 from . import models

2 changes: 1 addition & 1 deletion superset/connectors/sqla/models.py
@@ -12,8 +12,8 @@
     and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, or_,
     select, String, Text,
 )
-from sqlalchemy.schema import UniqueConstraint
 from sqlalchemy.orm import backref, relationship
+from sqlalchemy.schema import UniqueConstraint
 from sqlalchemy.sql import column, literal_column, table, text
 from sqlalchemy.sql.expression import TextAsFrom
 import sqlparse
2 changes: 1 addition & 1 deletion superset/connectors/sqla/views.py
@@ -12,7 +12,7 @@
 from superset.utils import has_access
 from superset.views.base import (
     DatasourceFilter, DeleteMixin, get_datasource_exist_error_mgs,
-    ListWidgetWithCheckboxes, SupersetModelView, YamlExportMixin
+    ListWidgetWithCheckboxes, SupersetModelView, YamlExportMixin,
 )
 from . import models

33 changes: 17 additions & 16 deletions superset/dict_import_export_util.py
@@ -1,7 +1,8 @@
 import logging

-from superset.models.core import Database
 from superset.connectors.druid.models import DruidCluster
+from superset.models.core import Database

+
 DATABASES_KEY = 'databases'
 DRUID_CLUSTERS_KEY = 'druid_clusters'
@@ -26,17 +27,17 @@ def export_to_dict(session,
                    back_references,
                    include_defaults):
     """Exports databases and druid clusters to a dictionary"""
-    logging.info("Starting export")
+    logging.info('Starting export')
     dbs = session.query(Database)
     databases = [database.export_to_dict(recursive=recursive,
                                          include_parent_ref=back_references,
                                          include_defaults=include_defaults) for database in dbs]
-    logging.info("Exported %d %s", len(databases), DATABASES_KEY)
+    logging.info('Exported %d %s', len(databases), DATABASES_KEY)
     cls = session.query(DruidCluster)
     clusters = [cluster.export_to_dict(recursive=recursive,
                                        include_parent_ref=back_references,
                                        include_defaults=include_defaults) for cluster in cls]
-    logging.info("Exported %d %s", len(clusters), DRUID_CLUSTERS_KEY)
+    logging.info('Exported %d %s', len(clusters), DRUID_CLUSTERS_KEY)
     data = dict()
     if databases:
         data[DATABASES_KEY] = databases
@@ -48,17 +49,17 @@ def import_from_dict(session, data, sync=[]):
 def import_from_dict(session, data, sync=[]):
     """Imports databases and druid clusters from dictionary"""
     if isinstance(data, dict):
-        logging.info("Importing %d %s",
-                     len(data.get(DATABASES_KEY, [])),
-                     DATABASES_KEY)
-        for database in data.get(DATABASES_KEY, []):
-            Database.import_from_dict(session, database, sync=sync)
+        logging.info('Importing %d %s',
+                     len(data.get(DATABASES_KEY, [])),
+                     DATABASES_KEY)
+        for database in data.get(DATABASES_KEY, []):
+            Database.import_from_dict(session, database, sync=sync)

-        logging.info("Importing %d %s",
-                     len(data.get(DRUID_CLUSTERS_KEY, [])),
-                     DRUID_CLUSTERS_KEY)
-        for datasource in data.get(DRUID_CLUSTERS_KEY, []):
-            DruidCluster.import_from_dict(session, datasource, sync=sync)
-        session.commit()
+        logging.info('Importing %d %s',
+                     len(data.get(DRUID_CLUSTERS_KEY, [])),
+                     DRUID_CLUSTERS_KEY)
+        for datasource in data.get(DRUID_CLUSTERS_KEY, []):
+            DruidCluster.import_from_dict(session, datasource, sync=sync)
+        session.commit()
     else:
-        logging.info("Supplied object is not a dictionary.")
+        logging.info('Supplied object is not a dictionary.')
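Note: import_from_dict above keys the payload on DATABASES_KEY and DRUID_CLUSTERS_KEY. A minimal sketch of the expected shape; the entry fields shown are illustrative only (the real field set comes from each model's export_fields):

    data = {
        'databases': [
            # One dict per Database, as produced by Database.export_to_dict();
            # nested tables/columns/metrics may appear when exported recursively.
            {'database_name': 'examples'},
        ],
        'druid_clusters': [
            {'cluster_name': 'local_druid'},  # illustrative field
        ],
    }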
1 change: 0 additions & 1 deletion superset/models/core.py
@@ -546,7 +546,6 @@ class Database(Model, AuditMixinNullable, ImportMixin):
     type = 'table'
     __table_args__ = (UniqueConstraint('database_name'),)

-
     id = Column(Integer, primary_key=True)
     verbose_name = Column(String(250), unique=True)
     # short unique name, used in permissions
31 changes: 15 additions & 16 deletions superset/models/helpers.py
@@ -6,19 +6,18 @@

 from datetime import datetime
 import json
-import yaml
 import logging
 import re
-from sqlalchemy.orm.exc import MultipleResultsFound
-from sqlalchemy import and_, or_
-from sqlalchemy import UniqueConstraint

 from flask import escape, Markup
 from flask_appbuilder.models.decorators import renders
 from flask_appbuilder.models.mixins import AuditMixin
 import humanize
 import sqlalchemy as sa
+from sqlalchemy import and_, or_, UniqueConstraint
 from sqlalchemy.ext.declarative import declared_attr
+from sqlalchemy.orm.exc import MultipleResultsFound
+import yaml

 from superset import sm
 from superset.utils import QueryStatus
@@ -62,7 +61,7 @@ def export_schema(cls, recursive=True, include_parent_ref=False):
         if parent_ref:
             parent_excludes = {c.name for c in parent_ref.local_columns}

-        def formatter(c): return ("{0} Default ({1})".format(
+        def formatter(c): return ('{0} Default ({1})'.format(
             str(c.type), c.default.arg) if c.default else str(c.type))

         schema = {c.name: formatter(c) for c in cls.__table__.columns
@@ -117,23 +116,23 @@ def import_from_dict(cls, session, dict_rep, parent=None,
         try:
             obj_query = session.query(cls).filter(and_(*filters))
             obj = obj_query.one_or_none()
-        except MultipleResultsFound, e:
+        except MultipleResultsFound as e:
             logging.error('Error importing %s \n %s \n %s', cls.__name__,
-                str(obj_query),
-                yaml.safe_dump(dict_rep))
+                          str(obj_query),
+                          yaml.safe_dump(dict_rep))
             raise e

         if not obj:
             is_new_obj = True
             # Create new DB object
             obj = cls(**dict_rep)
-            logging.info("Importing new %s %s", obj.__tablename__, str(obj))
+            logging.info('Importing new %s %s', obj.__tablename__, str(obj))
             if cls.export_parent and parent:
                 setattr(obj, cls.export_parent, parent)
             session.add(obj)
         else:
             is_new_obj = False
-            logging.info("Updating %s %s", obj.__tablename__, str(obj))
+            logging.info('Updating %s %s', obj.__tablename__, str(obj))
             # Update columns
             for k, v in dict_rep.items():
                 setattr(obj, k, v)
@@ -145,20 +144,20 @@ def import_from_dict(cls, session, dict_rep, parent=None,
             added = []
             for c_obj in new_children.get(c, []):
                 added.append(child_class.import_from_dict(session=session,
-                    dict_rep=c_obj,
-                    parent=obj,
-                    sync=sync))
+                                                          dict_rep=c_obj,
+                                                          parent=obj,
+                                                          sync=sync))
             # If children should get synced, delete the ones that did not
             # get updated.
             if c in sync and not is_new_obj:
                 back_refs = child_class._parent_foreign_key_mappings()
                 delete_filters = [getattr(child_class, k) ==
                                   getattr(obj, back_refs.get(k))
                                   for k in back_refs.keys()]
-                to_delete = set(session.query(child_class)
-                    .filter(and_(*delete_filters))).difference(set(added))
+                to_delete = set(session.query(child_class).filter(
+                    and_(*delete_filters))).difference(set(added))
                 for o in to_delete:
-                    logging.info("Deleting %s %s", c, str(obj))
+                    logging.info('Deleting %s %s', c, str(obj))
                     session.delete(o)

         return obj
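Note on ImportMixin.import_from_dict above: when a child type is listed in sync and the object already exists, child rows in the DB that were not re-created from dict_rep are deleted. A rough usage sketch, assuming a YAML file previously produced by export_to_dict(); the 'metrics'/'columns' keys follow the cli.py --sync help text:

    import yaml

    from superset import db
    from superset.models.core import Database

    # Hypothetical input: one exported database entry.
    with open('database.yaml') as f:  # illustrative path
        database_dict = yaml.load(f)

    obj = Database.import_from_dict(
        session=db.session,
        dict_rep=database_dict,
        sync=['metrics', 'columns'])  # prune children missing from the dict
    db.session.commit()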
21 changes: 11 additions & 10 deletions superset/views/base.py
@@ -3,7 +3,6 @@
 import json
 import logging
 import traceback
-import yaml

 from flask import abort, flash, g, get_flashed_messages, redirect, Response
 from flask_appbuilder import BaseView, ModelView
@@ -13,6 +12,7 @@
 from flask_babel import get_locale
 from flask_babel import gettext as __
 from flask_babel import lazy_gettext as _
+import yaml

 from superset import appbuilder, conf, db, sm, sql_parse, utils
 from superset.connectors.connector_registry import ConnectorRegistry
@@ -44,12 +44,13 @@ def json_error_response(msg=None, status=500, stacktrace=None, payload=None):


 def generate_download_headers(extension, filename=None):
-    filename = filename if filename else datetime.now().strftime("%Y%m%d_%H%M%S")
-    content_disp = "attachment; filename={}.{}".format(filename, extension)
-    headers = {
-        "Content-Disposition": content_disp,
-    }
-    return headers
+    filename = filename if filename else datetime.now().strftime('%Y%m%d_%H%M%S')
+    content_disp = 'attachment; filename={}.{}'.format(filename, extension)
+    headers = {
+        'Content-Disposition': content_disp,
+    }
+    return headers
+

 def api(f):
     """
@@ -230,16 +231,16 @@ def validate_json(form, field):  # noqa


 class YamlExportMixin(object):
-    @action("yaml_export", __("Export as YAML"), __("Export as YAML?"), "fa-download")
+    @action('yaml_export', __('Export to YAML'), __('Export to YAML?'), 'fa-download')
     def yaml_export(self, items):
         if not isinstance(items, list):
             items = [items]

         data = [t.export_to_dict() for t in items]
         return Response(
             yaml.safe_dump(data),
-            headers=generate_download_headers("yaml"),
-            mimetype="application/text")
+            headers=generate_download_headers('yaml'),
+            mimetype='application/text')


 class DeleteMixin(object):
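Note: YamlExportMixin is consumed by the model views touched earlier in this commit (druid/views.py, sqla/views.py). A minimal sketch of the pattern with a hypothetical view class; the real views combine it with SupersetModelView and a concrete datamodel:

    from superset.views.base import SupersetModelView, YamlExportMixin


    class MyDatasourceModelView(SupersetModelView, YamlExportMixin):  # hypothetical
        # datamodel wiring omitted; the mixin contributes the 'yaml_export'
        # list action, which streams export_to_dict() output as a YAML download.
        pass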
6 changes: 3 additions & 3 deletions superset/views/core.py
@@ -44,9 +44,9 @@
 from superset.sql_parse import SupersetQuery
 from superset.utils import has_access, merge_extra_filters, QueryStatus
 from .base import (
-    api, BaseSupersetView, CsvResponse, DeleteMixin, get_error_msg,
-    generate_download_headers, get_user_roles, json_error_response,
-    SupersetFilter, SupersetModelView, YamlExportMixin
+    api, BaseSupersetView, CsvResponse, DeleteMixin,
+    generate_download_headers, get_error_msg, get_user_roles,
+    json_error_response, SupersetFilter, SupersetModelView, YamlExportMixin,
 )

 config = app.config