Skip to content

Commit

Permalink
chore: add unit tests for the current import functionality (#11786)
Browse files Browse the repository at this point in the history
* chore: add unit tests for the current import functionality

* Improve comment

* Fix unit test
  • Loading branch information
betodealmeida committed Dec 3, 2020
1 parent 5b19398 commit e0288bf
Show file tree
Hide file tree
Showing 5 changed files with 474 additions and 24 deletions.
4 changes: 3 additions & 1 deletion superset/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,9 @@ def refresh_druid(datasource: str, merge: bool) -> None:
)
def import_dashboards(path: str, recursive: bool, username: str) -> None:
"""Import dashboards from JSON"""
from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
from superset.dashboards.commands.importers.dispatcher import (
ImportDashboardsCommand,
)

path_object = Path(path)
files: List[Path] = []
Expand Down
33 changes: 29 additions & 4 deletions superset/datasets/commands/importers/v0.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
from typing import Any, Callable, Dict, List, Optional

Expand Down Expand Up @@ -301,9 +302,23 @@ def __init__(
def run(self) -> None:
    """Import datasets from the YAML files loaded into ``self._configs``.

    Handles both v0 export shapes:
      * CLI export — a dict (keyed by database / druid-cluster metadata),
        imported wholesale via ``import_from_dict``;
      * UI export — a bare list of dataset definitions, imported one
        ``SqlaTable`` at a time.

    Raises whatever ``validate()`` raises on malformed files, and
    ``sqlalchemy.orm.exc.NoResultFound`` if a UI-exported dataset
    references a database that does not exist locally.
    """
    self.validate()

    # TODO (betodealmeida): add rollback in case of error
    for file_name, config in self._configs.items():
        logger.info("Importing dataset from file %s", file_name)
        if isinstance(config, dict):
            # CLI export: carries full database metadata, so the generic
            # importer can handle it directly.
            import_from_dict(db.session, config, sync=self.sync)
        else:  # list
            # UI export: a list of dataset dicts with no database block.
            for dataset in config:
                # UI exports don't have the database metadata, so we assume
                # the DB exists and has the same name
                params = json.loads(dataset["params"])
                # .one() intentionally fails loudly when the named database
                # is missing or ambiguous — NOTE(review): confirm callers
                # expect this rather than a skip.
                database = (
                    db.session.query(Database)
                    .filter_by(database_name=params["database_name"])
                    .one()
                )
                dataset["database_id"] = database.id
                SqlaTable.import_from_dict(db.session, dataset, sync=self.sync)

def validate(self) -> None:
# ensure all files are YAML
Expand All @@ -314,8 +329,18 @@ def validate(self) -> None:
logger.exception("Invalid YAML file")
raise IncorrectVersionError(f"{file_name} is not a valid YAML file")

# check for keys
if DATABASES_KEY not in config and DRUID_CLUSTERS_KEY not in config:
raise IncorrectVersionError(f"{file_name} has no valid keys")
# CLI export
if isinstance(config, dict):
# TODO (betodealmeida): validate with Marshmallow
if DATABASES_KEY not in config and DRUID_CLUSTERS_KEY not in config:
raise IncorrectVersionError(f"{file_name} has no valid keys")

# UI export
elif isinstance(config, list):
# TODO (betodealmeida): validate with Marshmallow
pass

else:
raise IncorrectVersionError(f"{file_name} is not a valid file")

self._configs[file_name] = config
57 changes: 50 additions & 7 deletions tests/dashboards/commands_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,14 +25,18 @@
from superset import db, security_manager
from superset.commands.exceptions import CommandInvalidError
from superset.commands.importers.exceptions import IncorrectVersionError
from superset.connectors.sqla.models import SqlaTable
from superset.dashboards.commands.exceptions import DashboardNotFoundError
from superset.dashboards.commands.export import ExportDashboardsCommand
from superset.dashboards.commands.importers.v1 import ImportDashboardsCommand
from superset.dashboards.commands.importers import v0, v1
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from tests.base_tests import SupersetTestCase
from tests.fixtures.importexport import (
chart_config,
dashboard_config,
dashboard_export,
dashboard_metadata_config,
database_config,
dataset_config,
Expand Down Expand Up @@ -205,6 +209,45 @@ def test_export_dashboard_command_key_order(self, mock_g1, mock_g2):


class TestImportDashboardsCommand(SupersetTestCase):
def test_import_v0_dashboard_cli_export(self):
    """Importing a v0 CLI dashboard export creates the dashboard, its
    chart and its dataset, reusing the existing database."""
    # Snapshot object counts so the post-import deltas can be asserted.
    dashboards_before = db.session.query(Dashboard).count()
    charts_before = db.session.query(Slice).count()
    datasets_before = db.session.query(SqlaTable).count()
    databases_before = db.session.query(Database).count()

    command = v0.ImportDashboardsCommand(
        {"20201119_181105.json": json.dumps(dashboard_export)}
    )
    command.run()

    # Exactly one of each new object; no new database is created.
    assert db.session.query(Dashboard).count() == dashboards_before + 1
    assert db.session.query(Slice).count() == charts_before + 1
    assert db.session.query(SqlaTable).count() == datasets_before + 1
    assert db.session.query(Database).count() == databases_before

    imported_dashboard = (
        db.session.query(Dashboard).filter_by(dashboard_title="Births 2").one()
    )
    assert len(imported_dashboard.slices) == 1
    imported_chart = imported_dashboard.slices[0]
    assert imported_chart.slice_name == "Number of California Births"

    imported_dataset = imported_chart.table
    assert imported_dataset.table_name == "birth_names_2"

    imported_database = imported_dataset.database
    assert imported_database.database_name == "examples"

    # Clean up so later tests see an unmodified session.
    db.session.delete(imported_dashboard)
    db.session.delete(imported_chart)
    db.session.delete(imported_dataset)
    db.session.commit()

def test_import_v1_dashboard(self):
"""Test that we can import a dashboard"""
contents = {
Expand All @@ -214,7 +257,7 @@ def test_import_v1_dashboard(self):
"charts/imported_chart.yaml": yaml.safe_dump(chart_config),
"dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config),
}
command = ImportDashboardsCommand(contents)
command = v1.ImportDashboardsCommand(contents)
command.run()

dashboard = (
Expand Down Expand Up @@ -296,7 +339,7 @@ def test_import_v1_dashboard_multiple(self):
"charts/imported_chart.yaml": yaml.safe_dump(chart_config),
"dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config),
}
command = ImportDashboardsCommand(contents)
command = v1.ImportDashboardsCommand(contents)
command.run()
command.run()

Expand Down Expand Up @@ -325,7 +368,7 @@ def test_import_v1_dashboard_validation(self):
"charts/imported_chart.yaml": yaml.safe_dump(chart_config),
"dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config),
}
command = ImportDashboardsCommand(contents)
command = v1.ImportDashboardsCommand(contents)
with pytest.raises(IncorrectVersionError) as excinfo:
command.run()
assert str(excinfo.value) == "Missing metadata.yaml"
Expand All @@ -338,14 +381,14 @@ def test_import_v1_dashboard_validation(self):
"timestamp": "2020-11-04T21:27:44.423819+00:00",
}
)
command = ImportDashboardsCommand(contents)
command = v1.ImportDashboardsCommand(contents)
with pytest.raises(IncorrectVersionError) as excinfo:
command.run()
assert str(excinfo.value) == "Must be equal to 1.0.0."

# type should be Database
contents["metadata.yaml"] = yaml.safe_dump(dataset_metadata_config)
command = ImportDashboardsCommand(contents)
command = v1.ImportDashboardsCommand(contents)
with pytest.raises(CommandInvalidError) as excinfo:
command.run()
assert str(excinfo.value) == "Error importing dashboard"
Expand All @@ -358,7 +401,7 @@ def test_import_v1_dashboard_validation(self):
del broken_config["table_name"]
contents["metadata.yaml"] = yaml.safe_dump(dashboard_metadata_config)
contents["datasets/imported_dataset.yaml"] = yaml.safe_dump(broken_config)
command = ImportDashboardsCommand(contents)
command = v1.ImportDashboardsCommand(contents)
with pytest.raises(CommandInvalidError) as excinfo:
command.run()
assert str(excinfo.value) == "Error importing dashboard"
Expand Down
98 changes: 88 additions & 10 deletions tests/datasets/commands_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=no-self-use, invalid-name
# pylint: disable=no-self-use, invalid-name, line-too-long

from operator import itemgetter
from unittest.mock import patch
Expand All @@ -29,15 +29,17 @@
from superset.databases.commands.importers.v1 import ImportDatabasesCommand
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.datasets.commands.export import ExportDatasetsCommand
from superset.datasets.commands.importers.v1 import ImportDatasetsCommand
from superset.datasets.commands.importers import v0, v1
from superset.models.core import Database
from superset.utils.core import get_example_database
from tests.base_tests import SupersetTestCase
from tests.fixtures.importexport import (
database_config,
database_metadata_config,
dataset_cli_export,
dataset_config,
dataset_metadata_config,
dataset_ui_export,
)


Expand Down Expand Up @@ -202,14 +204,86 @@ def test_export_dataset_command_key_order(self, mock_g):


class TestImportDatasetsCommand(SupersetTestCase):
def test_import_v0_dataset_cli_export(self):
    """A v0 CLI dataset export imports as a single new SqlaTable with
    its params, metrics and columns intact."""
    datasets_before = db.session.query(SqlaTable).count()

    command = v0.ImportDatasetsCommand(
        {"20201119_181105.yaml": yaml.safe_dump(dataset_cli_export)}
    )
    command.run()

    # Exactly one dataset was created.
    assert db.session.query(SqlaTable).count() == datasets_before + 1

    imported = (
        db.session.query(SqlaTable).filter_by(table_name="birth_names_2").one()
    )
    assert (
        imported.params
        == '{"remote_id": 3, "database_name": "examples", "import_time": 1604342885}'
    )
    assert len(imported.metrics) == 2
    assert imported.main_dttm_col == "ds"
    assert imported.filter_select_enabled
    expected_columns = [
        "num_california",
        "ds",
        "state",
        "gender",
        "name",
        "sum_boys",
        "sum_girls",
        "num",
    ]
    assert [col.column_name for col in imported.columns] == expected_columns

    # Clean up so later tests see an unmodified session.
    db.session.delete(imported)
    db.session.commit()

def test_import_v0_dataset_ui_export(self):
    """A v0 UI dataset export (list-shaped YAML) imports the same way a
    CLI export does: one new SqlaTable with params, metrics and columns."""
    datasets_before = db.session.query(SqlaTable).count()

    command = v0.ImportDatasetsCommand(
        {"20201119_181105.yaml": yaml.safe_dump(dataset_ui_export)}
    )
    command.run()

    # Exactly one dataset was created.
    assert db.session.query(SqlaTable).count() == datasets_before + 1

    imported = (
        db.session.query(SqlaTable).filter_by(table_name="birth_names_2").one()
    )
    assert (
        imported.params
        == '{"remote_id": 3, "database_name": "examples", "import_time": 1604342885}'
    )
    assert len(imported.metrics) == 2
    assert imported.main_dttm_col == "ds"
    assert imported.filter_select_enabled
    expected_columns = [
        "num_california",
        "ds",
        "state",
        "gender",
        "name",
        "sum_boys",
        "sum_girls",
        "num",
    ]
    assert [col.column_name for col in imported.columns] == expected_columns

    # Clean up so later tests see an unmodified session.
    db.session.delete(imported)
    db.session.commit()

def test_import_v1_dataset(self):
"""Test that we can import a dataset"""
contents = {
"metadata.yaml": yaml.safe_dump(dataset_metadata_config),
"databases/imported_database.yaml": yaml.safe_dump(database_config),
"datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
}
command = ImportDatasetsCommand(contents)
command = v1.ImportDatasetsCommand(contents)
command.run()

dataset = (
Expand Down Expand Up @@ -267,7 +341,7 @@ def test_import_v1_dataset_multiple(self):
"databases/imported_database.yaml": yaml.safe_dump(database_config),
"datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
}
command = ImportDatasetsCommand(contents)
command = v1.ImportDatasetsCommand(contents)
command.run()
command.run()
dataset = (
Expand All @@ -285,7 +359,7 @@ def test_import_v1_dataset_multiple(self):
"databases/imported_database.yaml": yaml.safe_dump(database_config),
"datasets/imported_dataset.yaml": yaml.safe_dump(new_config),
}
command = ImportDatasetsCommand(contents)
command = v1.ImportDatasetsCommand(contents)
command.run()
dataset = (
db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one()
Expand All @@ -305,7 +379,7 @@ def test_import_v1_dataset_validation(self):
contents = {
"datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
}
command = ImportDatasetsCommand(contents)
command = v1.ImportDatasetsCommand(contents)
with pytest.raises(IncorrectVersionError) as excinfo:
command.run()
assert str(excinfo.value) == "Missing metadata.yaml"
Expand All @@ -318,14 +392,14 @@ def test_import_v1_dataset_validation(self):
"timestamp": "2020-11-04T21:27:44.423819+00:00",
}
)
command = ImportDatasetsCommand(contents)
command = v1.ImportDatasetsCommand(contents)
with pytest.raises(IncorrectVersionError) as excinfo:
command.run()
assert str(excinfo.value) == "Must be equal to 1.0.0."

# type should be SqlaTable
contents["metadata.yaml"] = yaml.safe_dump(database_metadata_config)
command = ImportDatasetsCommand(contents)
command = v1.ImportDatasetsCommand(contents)
with pytest.raises(CommandInvalidError) as excinfo:
command.run()
assert str(excinfo.value) == "Error importing dataset"
Expand All @@ -338,7 +412,7 @@ def test_import_v1_dataset_validation(self):
del broken_config["database_name"]
contents["metadata.yaml"] = yaml.safe_dump(dataset_metadata_config)
contents["databases/imported_database.yaml"] = yaml.safe_dump(broken_config)
command = ImportDatasetsCommand(contents)
command = v1.ImportDatasetsCommand(contents)
with pytest.raises(CommandInvalidError) as excinfo:
command.run()
assert str(excinfo.value) == "Error importing dataset"
Expand Down Expand Up @@ -369,10 +443,14 @@ def test_import_v1_dataset_existing_database(self):
"datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
"databases/imported_database.yaml": yaml.safe_dump(database_config),
}
command = ImportDatasetsCommand(contents)
command = v1.ImportDatasetsCommand(contents)
command.run()

database = (
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
)
assert len(database.tables) == 1

db.session.delete(database.tables[0])
db.session.delete(database)
db.session.commit()
Loading

0 comments on commit e0288bf

Please sign in to comment.