feat: add export_related flag #19215

Merged 2 commits on Mar 16, 2022
6 changes: 3 additions & 3 deletions superset/charts/commands/export.py
@@ -26,7 +26,7 @@
 from superset.charts.commands.exceptions import ChartNotFoundError
 from superset.charts.dao import ChartDAO
 from superset.datasets.commands.export import ExportDatasetsCommand
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
 from superset.models.slice import Slice
 from superset.utils.dict_import_export import EXPORT_VERSION

@@ -43,7 +43,7 @@ class ExportChartsCommand(ExportModelsCommand):
     not_found = ChartNotFoundError

     @staticmethod
-    def _export(model: Slice) -> Iterator[Tuple[str, str]]:
+    def _export(model: Slice, export_related: bool = True) -> Iterator[Tuple[str, str]]:
[Review comment] Member: nit: I like `export_dependencies` instead of `related`.

[Reply] Member Author: I used `related` to keep the same terminology used by FAB, and also because some of these are not technically dependencies (e.g., when we export a database you also get the datasets, though they're not dependencies).

         chart_slug = secure_filename(model.slice_name)
         file_name = f"charts/{chart_slug}_{model.id}.yaml"

@@ -72,5 +72,5 @@ def _export(model: Slice) -> Iterator[Tuple[str, str]]:
         file_content = yaml.safe_dump(payload, sort_keys=False)
         yield file_name, file_content

-        if model.table:
+        if model.table and export_related:
             yield from ExportDatasetsCommand([model.table.id]).run()
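A minimal usage sketch (not part of the diff; the helper name is hypothetical): `run()` yields `(file_name, file_content)` tuples, so a caller can bundle a chart without its dataset by passing the new flag.

```python
# Hypothetical helper; ExportChartsCommand comes from the file above.
from io import BytesIO
from zipfile import ZipFile

from superset.charts.commands.export import ExportChartsCommand


def export_chart_only(chart_id: int) -> bytes:
    """Bundle a single chart, skipping the related dataset/database files."""
    buf = BytesIO()
    with ZipFile(buf, "w") as bundle:
        # export_related=False suppresses the ExportDatasetsCommand yields
        for file_name, file_content in ExportChartsCommand(
            [chart_id], export_related=False
        ).run():
            bundle.writestr(file_name, file_content)
    return buf.getvalue()
```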
16 changes: 16 additions & 0 deletions superset/commands/export/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
superset/commands/export.py → superset/commands/export/models.py (renamed)
@@ -14,10 +14,8 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# isort:skip_file

-from datetime import datetime
-from datetime import timezone
+from datetime import datetime, timezone
 from typing import Iterator, List, Tuple, Type

 import yaml
@@ -36,14 +34,15 @@ class ExportModelsCommand(BaseCommand):
     dao: Type[BaseDAO] = BaseDAO
     not_found: Type[CommandException] = CommandException

-    def __init__(self, model_ids: List[int]):
+    def __init__(self, model_ids: List[int], export_related: bool = True):
         self.model_ids = model_ids
+        self.export_related = export_related

         # this will be set when calling validate()
         self._models: List[Model] = []

     @staticmethod
-    def _export(model: Model) -> Iterator[Tuple[str, str]]:
+    def _export(model: Model, export_related: bool = True) -> Iterator[Tuple[str, str]]:
         raise NotImplementedError("Subclasses MUST implement _export")

     def run(self) -> Iterator[Tuple[str, str]]:
@@ -58,7 +57,7 @@ def run(self) -> Iterator[Tuple[str, str]]:

         seen = {METADATA_FILE_NAME}
         for model in self._models:
-            for file_name, file_content in self._export(model):
+            for file_name, file_content in self._export(model, self.export_related):
                 if file_name not in seen:
                     yield file_name, file_content
                     seen.add(file_name)
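To recap the base-class mechanics after this change: `run()` forwards `self.export_related` into the static `_export`, deduplicates file names via `seen`, and each subclass gates its related-object yields on the flag. A self-contained toy illustration (all names made up; this is not Superset code):

```python
# Toy version of the pattern: a main file is always yielded, related
# files are gated on export_related, and run() dedupes by file name.
from typing import Iterator, List, Tuple


def _export(model_id: int, export_related: bool = True) -> Iterator[Tuple[str, str]]:
    yield f"things/{model_id}.yaml", "main: payload"        # always exported
    if export_related:                                      # gated, like the PR
        yield f"related/{model_id}.yaml", "related: payload"


def run(model_ids: List[int], export_related: bool = True) -> Iterator[Tuple[str, str]]:
    seen = {"metadata.yaml"}
    yield "metadata.yaml", "version: 1.0.0"
    for model_id in model_ids:
        for file_name, file_content in _export(model_id, export_related):
            if file_name not in seen:                       # dedupe, as in run()
                seen.add(file_name)
                yield file_name, file_content


print(dict(run([1, 2], export_related=False)))
# {'metadata.yaml': 'version: 1.0.0', 'things/1.yaml': 'main: payload', ...}
```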
15 changes: 10 additions & 5 deletions superset/dashboards/commands/export.py
@@ -29,7 +29,7 @@
 from superset.dashboards.commands.exceptions import DashboardNotFoundError
 from superset.dashboards.commands.importers.v1.utils import find_chart_uuids
 from superset.dashboards.dao import DashboardDAO
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
 from superset.datasets.commands.export import ExportDatasetsCommand
 from superset.datasets.dao import DatasetDAO
 from superset.models.dashboard import Dashboard
@@ -106,8 +106,11 @@ class ExportDashboardsCommand(ExportModelsCommand):
     dao = DashboardDAO
     not_found = DashboardNotFoundError

+    # pylint: disable=too-many-locals
     @staticmethod
-    def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
+    def _export(
+        model: Dashboard, export_related: bool = True
+    ) -> Iterator[Tuple[str, str]]:
         dashboard_slug = secure_filename(model.dashboard_title)
         file_name = f"dashboards/{dashboard_slug}.yaml"

@@ -138,7 +141,8 @@ def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
             if dataset_id is not None:
                 dataset = DatasetDAO.find_by_id(dataset_id)
                 target["datasetUuid"] = str(dataset.uuid)
-                yield from ExportDatasetsCommand([dataset_id]).run()
+                if export_related:
+                    yield from ExportDatasetsCommand([dataset_id]).run()

         # the mapping between dashboard -> charts is inferred from the position
         # attribute, so if it's not present we need to add a default config
@@ -160,5 +164,6 @@ def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
         file_content = yaml.safe_dump(payload, sort_keys=False)
         yield file_name, file_content

-        chart_ids = [chart.id for chart in model.slices]
-        yield from ExportChartsCommand(chart_ids).run()
+        if export_related:
+            chart_ids = [chart.id for chart in model.slices]
+            yield from ExportChartsCommand(chart_ids).run()
37 changes: 20 additions & 17 deletions superset/databases/commands/export.py
@@ -25,7 +25,7 @@

 from superset.databases.commands.exceptions import DatabaseNotFoundError
 from superset.databases.dao import DatabaseDAO
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
 from superset.models.core import Database
 from superset.utils.dict_import_export import EXPORT_VERSION

@@ -55,7 +55,9 @@ class ExportDatabasesCommand(ExportModelsCommand):
     not_found = DatabaseNotFoundError

     @staticmethod
-    def _export(model: Database) -> Iterator[Tuple[str, str]]:
+    def _export(
+        model: Database, export_related: bool = True
+    ) -> Iterator[Tuple[str, str]]:
         database_slug = secure_filename(model.database_name)
         file_name = f"databases/{database_slug}.yaml"

@@ -90,18 +92,19 @@ def _export(model: Database) -> Iterator[Tuple[str, str]]:
         file_content = yaml.safe_dump(payload, sort_keys=False)
         yield file_name, file_content

-        for dataset in model.tables:
-            dataset_slug = secure_filename(dataset.table_name)
-            file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
-
-            payload = dataset.export_to_dict(
-                recursive=True,
-                include_parent_ref=False,
-                include_defaults=True,
-                export_uuids=True,
-            )
-            payload["version"] = EXPORT_VERSION
-            payload["database_uuid"] = str(model.uuid)
-
-            file_content = yaml.safe_dump(payload, sort_keys=False)
-            yield file_name, file_content
+        if export_related:
+            for dataset in model.tables:
+                dataset_slug = secure_filename(dataset.table_name)
+                file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
+
+                payload = dataset.export_to_dict(
+                    recursive=True,
+                    include_parent_ref=False,
+                    include_defaults=True,
+                    export_uuids=True,
+                )
+                payload["version"] = EXPORT_VERSION
+                payload["database_uuid"] = str(model.uuid)
+
+                file_content = yaml.safe_dump(payload, sort_keys=False)
+                yield file_name, file_content
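The net effect (a sketch, assuming `example_db` is a `Database` with datasets): with the flag off, the generator emits only `metadata.yaml` and the `databases/*.yaml` file, because the dataset loop above is skipped.

```python
# Sketch: no datasets/ entries should appear when export_related=False.
files = dict(ExportDatabasesCommand([example_db.id], export_related=False).run())
assert not any(name.startswith("datasets/") for name in files)
```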
43 changes: 23 additions & 20 deletions superset/datasets/commands/export.py
@@ -23,7 +23,7 @@
 import yaml
 from werkzeug.utils import secure_filename

-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
 from superset.connectors.sqla.models import SqlaTable
 from superset.datasets.commands.exceptions import DatasetNotFoundError
 from superset.datasets.dao import DatasetDAO
@@ -40,7 +40,9 @@ class ExportDatasetsCommand(ExportModelsCommand):
     not_found = DatasetNotFoundError

     @staticmethod
-    def _export(model: SqlaTable) -> Iterator[Tuple[str, str]]:
+    def _export(
+        model: SqlaTable, export_related: bool = True
+    ) -> Iterator[Tuple[str, str]]:
         database_slug = secure_filename(model.database.database_name)
         dataset_slug = secure_filename(model.table_name)
         file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
@@ -76,23 +78,24 @@ def _export(model: SqlaTable) -> Iterator[Tuple[str, str]]:
         yield file_name, file_content

         # include database as well
-        file_name = f"databases/{database_slug}.yaml"
-
-        payload = model.database.export_to_dict(
-            recursive=False,
-            include_parent_ref=False,
-            include_defaults=True,
-            export_uuids=True,
-        )
-        # TODO (betodealmeida): move this logic to export_to_dict once this
-        # becomes the default export endpoint
-        if payload.get("extra"):
-            try:
-                payload["extra"] = json.loads(payload["extra"])
-            except json.decoder.JSONDecodeError:
-                logger.info("Unable to decode `extra` field: %s", payload["extra"])
-
-        payload["version"] = EXPORT_VERSION
-
-        file_content = yaml.safe_dump(payload, sort_keys=False)
-        yield file_name, file_content
+        if export_related:
+            file_name = f"databases/{database_slug}.yaml"
+
+            payload = model.database.export_to_dict(
+                recursive=False,
+                include_parent_ref=False,
+                include_defaults=True,
+                export_uuids=True,
+            )
+            # TODO (betodealmeida): move this logic to export_to_dict once this
+            # becomes the default export endpoint
+            if payload.get("extra"):
+                try:
+                    payload["extra"] = json.loads(payload["extra"])
+                except json.decoder.JSONDecodeError:
+                    logger.info("Unable to decode `extra` field: %s", payload["extra"])
+
+            payload["version"] = EXPORT_VERSION
+
+            file_content = yaml.safe_dump(payload, sort_keys=False)
+            yield file_name, file_content
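The `extra` handling above exists because `Database.extra` is stored as a JSON string; decoding it lets the YAML export nest it as a mapping instead of a quoted blob. A standalone illustration (sample values made up):

```python
import json

import yaml

payload = {
    "database_name": "examples",
    "extra": '{"allows_virtual_table_explore": true}',  # stored as a JSON string
}
payload["extra"] = json.loads(payload["extra"])  # same decode as in _export
print(yaml.safe_dump(payload, sort_keys=False))
# database_name: examples
# extra:
#   allows_virtual_table_explore: true
```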
41 changes: 22 additions & 19 deletions superset/queries/saved_queries/commands/export.py
@@ -23,7 +23,7 @@
 import yaml
 from werkzeug.utils import secure_filename

-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
 from superset.models.sql_lab import SavedQuery
 from superset.queries.saved_queries.commands.exceptions import SavedQueryNotFoundError
 from superset.queries.saved_queries.dao import SavedQueryDAO
@@ -38,7 +38,9 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
     not_found = SavedQueryNotFoundError

     @staticmethod
-    def _export(model: SavedQuery) -> Iterator[Tuple[str, str]]:
+    def _export(
+        model: SavedQuery, export_related: bool = True
+    ) -> Iterator[Tuple[str, str]]:
         # build filename based on database, optional schema, and label
         database_slug = secure_filename(model.database.database_name)
         schema_slug = secure_filename(model.schema)
@@ -58,23 +60,24 @@ def _export(model: SavedQuery) -> Iterator[Tuple[str, str]]:
         yield file_name, file_content

         # include database as well
-        file_name = f"databases/{database_slug}.yaml"
-
-        payload = model.database.export_to_dict(
-            recursive=False,
-            include_parent_ref=False,
-            include_defaults=True,
-            export_uuids=True,
-        )
-        # TODO (betodealmeida): move this logic to export_to_dict once this
-        # becomes the default export endpoint
-        if "extra" in payload:
-            try:
-                payload["extra"] = json.loads(payload["extra"])
-            except json.decoder.JSONDecodeError:
-                logger.info("Unable to decode `extra` field: %s", payload["extra"])
-
-        payload["version"] = EXPORT_VERSION
-
-        file_content = yaml.safe_dump(payload, sort_keys=False)
-        yield file_name, file_content
+        if export_related:
+            file_name = f"databases/{database_slug}.yaml"
+
+            payload = model.database.export_to_dict(
+                recursive=False,
+                include_parent_ref=False,
+                include_defaults=True,
+                export_uuids=True,
+            )
+            # TODO (betodealmeida): move this logic to export_to_dict once this
+            # becomes the default export endpoint
+            if "extra" in payload:
+                try:
+                    payload["extra"] = json.loads(payload["extra"])
+                except json.decoder.JSONDecodeError:
+                    logger.info("Unable to decode `extra` field: %s", payload["extra"])
+
+            payload["version"] = EXPORT_VERSION
+
+            file_content = yaml.safe_dump(payload, sort_keys=False)
+            yield file_name, file_content
2 changes: 1 addition & 1 deletion superset/utils/async_query_manager.py
@@ -71,7 +71,7 @@ class AsyncQueryManager:

     def __init__(self) -> None:
         super().__init__()
-        self._redis: redis.Redis  # type: ignore
+        self._redis: redis.Redis
         self._stream_prefix: str = ""
         self._stream_limit: Optional[int]
         self._stream_limit_firehose: Optional[int]
20 changes: 20 additions & 0 deletions tests/integration_tests/charts/commands_tests.py
@@ -176,6 +176,26 @@ def test_export_chart_command_key_order(self, mock_g):
"dataset_uuid",
]

@patch("superset.security.manager.g")
@pytest.mark.usefixtures("load_energy_table_with_slice")
def test_export_chart_command_no_related(self, mock_g):
"""
Test that only the chart is exported when export_related=False.
"""
mock_g.user = security_manager.find_user("admin")

example_chart = (
db.session.query(Slice).filter_by(slice_name="Energy Sankey").one()
)
command = ExportChartsCommand([example_chart.id], export_related=False)
contents = dict(command.run())

expected = [
"metadata.yaml",
f"charts/Energy_Sankey_{example_chart.id}.yaml",
]
assert expected == list(contents.keys())


class TestImportChartsCommand(SupersetTestCase):
@patch("superset.charts.commands.importers.v1.utils.g")
Expand Down
22 changes: 22 additions & 0 deletions tests/integration_tests/dashboards/commands_tests.py
@@ -423,6 +423,28 @@ def test_append_charts(self, mock_suffix):
"DASHBOARD_VERSION_KEY": "v2",
}

@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
@patch("superset.security.manager.g")
@patch("superset.views.base.g")
def test_export_dashboard_command_no_related(self, mock_g1, mock_g2):
"""
Test that only the dashboard is exported when export_related=False.
"""
mock_g1.user = security_manager.find_user("admin")
mock_g2.user = security_manager.find_user("admin")

example_dashboard = (
db.session.query(Dashboard).filter_by(slug="world_health").one()
)
command = ExportDashboardsCommand([example_dashboard.id], export_related=False)
contents = dict(command.run())

expected_paths = {
"metadata.yaml",
"dashboards/World_Banks_Data.yaml",
}
assert expected_paths == set(contents.keys())


class TestImportDashboardsCommand(SupersetTestCase):
def test_import_v0_dashboard_cli_export(self):
Expand Down
20 changes: 20 additions & 0 deletions tests/integration_tests/databases/commands_tests.py
@@ -358,6 +358,26 @@ def test_export_database_command_key_order(self, mock_g):
"version",
]

@patch("superset.security.manager.g")
@pytest.mark.usefixtures(
"load_birth_names_dashboard_with_slices", "load_energy_table_with_slice"
)
def test_export_database_command_no_related(self, mock_g):
"""
Test that only databases are exported when export_related=False.
"""
mock_g.user = security_manager.find_user("admin")

example_db = get_example_database()
db_uuid = example_db.uuid

command = ExportDatabasesCommand([example_db.id], export_related=False)
contents = dict(command.run())
prefixes = {path.split("/")[0] for path in contents}
assert "metadata.yaml" in prefixes
assert "databases" in prefixes
assert "datasets" not in prefixes


class TestImportDatabasesCommand(SupersetTestCase):
def test_import_v1_database(self):
Expand Down