diff --git a/RESOURCES/FEATURE_FLAGS.md b/RESOURCES/FEATURE_FLAGS.md
index 6ce8c1c1ee2dc..ef05ab98db5c5 100644
--- a/RESOURCES/FEATURE_FLAGS.md
+++ b/RESOURCES/FEATURE_FLAGS.md
@@ -88,4 +88,3 @@ These features flags currently default to True and **will be removed in a future
- ENABLE_JAVASCRIPT_CONTROLS
- GENERIC_CHART_AXES
- KV_STORE
-- VERSIONED_EXPORT
diff --git a/UPDATING.md b/UPDATING.md
index b239866221e36..f1f2566736817 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -28,6 +28,7 @@ assists people when migrating to a new version.
### Breaking Changes
+- [26347](https://github.com/apache/superset/issues/26347): Removes the deprecated `VERSIONED_EXPORT` feature flag. The previous value of the feature flag was `True` and now the feature is permanently enabled.
- [26328](https://github.com/apache/superset/issues/26328): Removes the deprecated Filter Box code and its associated dependencies `react-select` and `array-move`. It also removes the `DeprecatedSelect` and `AsyncSelect` components that were exclusively used by filter boxes. Existing filter boxes will be automatically migrated to native filters.
- [26330](https://github.com/apache/superset/issues/26330): Removes the deprecated `DASHBOARD_FILTERS_EXPERIMENTAL` feature flag. The previous value of the feature flag was `False` and now the feature is permanently removed.
- [26344](https://github.com/apache/superset/issues/26344): Removes the deprecated `ENABLE_EXPLORE_JSON_CSRF_PROTECTION` feature flag. The previous value of the feature flag was `False` and now the feature is permanently removed.
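For deployments that still set this flag explicitly, the entry can simply be deleted from the `FEATURE_FLAGS` override. A minimal, illustrative `superset_config.py` sketch (the other flag shown is only an example and is unaffected by this change):

```python
# superset_config.py -- illustrative sketch only
FEATURE_FLAGS = {
    # "VERSIONED_EXPORT": True,  # remove: the flag is no longer read and the
    #                            # versioned (ZIP-based) export is always on
    "DASHBOARD_VIRTUALIZATION": True,  # unrelated flags are unaffected
}
```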
diff --git a/superset-frontend/packages/superset-ui-core/src/utils/featureFlags.ts b/superset-frontend/packages/superset-ui-core/src/utils/featureFlags.ts
index 6d5935f04eac6..ba9d0d6680fd8 100644
--- a/superset-frontend/packages/superset-ui-core/src/utils/featureFlags.ts
+++ b/superset-frontend/packages/superset-ui-core/src/utils/featureFlags.ts
@@ -57,7 +57,6 @@ export enum FeatureFlag {
TAGGING_SYSTEM = 'TAGGING_SYSTEM',
THUMBNAILS = 'THUMBNAILS',
USE_ANALAGOUS_COLORS = 'USE_ANALAGOUS_COLORS',
- VERSIONED_EXPORT = 'VERSIONED_EXPORT',
}
export type ScheduleQueriesProps = {
JSONSCHEMA: {
diff --git a/superset-frontend/src/features/charts/ChartCard.tsx b/superset-frontend/src/features/charts/ChartCard.tsx
index 352f34d3590e8..38a46eb89873e 100644
--- a/superset-frontend/src/features/charts/ChartCard.tsx
+++ b/superset-frontend/src/features/charts/ChartCard.tsx
@@ -67,8 +67,7 @@ export default function ChartCard({
const history = useHistory();
const canEdit = hasPerm('can_write');
const canDelete = hasPerm('can_write');
- const canExport =
- hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+ const canExport = hasPerm('can_export');
const theme = useTheme();
const menu = (
diff --git a/superset-frontend/src/pages/ChartList/index.tsx b/superset-frontend/src/pages/ChartList/index.tsx
index 508019666111c..bfd945ea189df 100644
--- a/superset-frontend/src/pages/ChartList/index.tsx
+++ b/superset-frontend/src/pages/ChartList/index.tsx
@@ -234,8 +234,7 @@ function ChartList(props: ChartListProps) {
const canCreate = hasPerm('can_write');
const canEdit = hasPerm('can_write');
const canDelete = hasPerm('can_write');
- const canExport =
- hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+ const canExport = hasPerm('can_export');
const initialSort = [{ id: 'changed_on_delta_humanized', desc: true }];
const handleBulkChartExport = (chartsToExport: Chart[]) => {
const ids = chartsToExport.map(({ id }) => id);
@@ -777,21 +776,19 @@ function ChartList(props: ChartListProps) {
},
});
- if (isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
- subMenuButtons.push({
- name: (
-
-
-
- ),
- buttonStyle: 'link',
- onClick: openChartImportModal,
- });
- }
+ subMenuButtons.push({
+ name: (
+
+
+
+ ),
+ buttonStyle: 'link',
+ onClick: openChartImportModal,
+ });
}
return (
diff --git a/superset-frontend/src/pages/DashboardList/index.tsx b/superset-frontend/src/pages/DashboardList/index.tsx
index e82b70185991e..76b34c6652b2b 100644
--- a/superset-frontend/src/pages/DashboardList/index.tsx
+++ b/superset-frontend/src/pages/DashboardList/index.tsx
@@ -183,8 +183,7 @@ function DashboardList(props: DashboardListProps) {
const canCreate = hasPerm('can_write');
const canEdit = hasPerm('can_write');
const canDelete = hasPerm('can_write');
- const canExport =
- hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+ const canExport = hasPerm('can_export');
const initialSort = [{ id: 'changed_on_delta_humanized', desc: true }];
@@ -659,21 +658,19 @@ function DashboardList(props: DashboardListProps) {
},
});
- if (isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
- subMenuButtons.push({
- name: (
-
-
-
- ),
- buttonStyle: 'link',
- onClick: openDashboardImportModal,
- });
- }
+ subMenuButtons.push({
+ name: (
+
+
+
+ ),
+ buttonStyle: 'link',
+ onClick: openDashboardImportModal,
+ });
}
return (
<>
diff --git a/superset-frontend/src/pages/DatabaseList/index.tsx b/superset-frontend/src/pages/DatabaseList/index.tsx
index 8c98392aca93e..b30786941ea53 100644
--- a/superset-frontend/src/pages/DatabaseList/index.tsx
+++ b/superset-frontend/src/pages/DatabaseList/index.tsx
@@ -17,8 +17,6 @@
* under the License.
*/
import {
- isFeatureEnabled,
- FeatureFlag,
getExtensionsRegistry,
styled,
SupersetClient,
@@ -216,8 +214,7 @@ function DatabaseList({
const canCreate = hasPerm('can_write');
const canEdit = hasPerm('can_write');
const canDelete = hasPerm('can_write');
- const canExport =
- hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+ const canExport = hasPerm('can_export');
const { canUploadCSV, canUploadColumnar, canUploadExcel } = uploadUserPerms(
roles,
diff --git a/superset-frontend/src/pages/DatasetList/index.tsx b/superset-frontend/src/pages/DatasetList/index.tsx
index 5aef13dbb6101..2ec521ad45057 100644
--- a/superset-frontend/src/pages/DatasetList/index.tsx
+++ b/superset-frontend/src/pages/DatasetList/index.tsx
@@ -17,8 +17,6 @@
* under the License.
*/
import {
- isFeatureEnabled,
- FeatureFlag,
getExtensionsRegistry,
styled,
SupersetClient,
@@ -207,8 +205,7 @@ const DatasetList: FunctionComponent = ({
const canDelete = hasPerm('can_write');
const canCreate = hasPerm('can_write');
const canDuplicate = hasPerm('can_duplicate');
- const canExport =
- hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+ const canExport = hasPerm('can_export');
const initialSort = SORT_BY;
@@ -654,21 +651,19 @@ const DatasetList: FunctionComponent = ({
buttonStyle: 'primary',
});
- if (isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
- buttonArr.push({
- name: (
-
-
-
- ),
- buttonStyle: 'link',
- onClick: openDatasetImportModal,
- });
- }
+ buttonArr.push({
+ name: (
+
+
+
+ ),
+ buttonStyle: 'link',
+ onClick: openDatasetImportModal,
+ });
}
menuData.buttons = buttonArr;
diff --git a/superset-frontend/src/pages/SavedQueryList/index.tsx b/superset-frontend/src/pages/SavedQueryList/index.tsx
index d48ffef8c90c3..958107aa5d253 100644
--- a/superset-frontend/src/pages/SavedQueryList/index.tsx
+++ b/superset-frontend/src/pages/SavedQueryList/index.tsx
@@ -159,8 +159,7 @@ function SavedQueryList({
const canCreate = hasPerm('can_write');
const canEdit = hasPerm('can_write');
const canDelete = hasPerm('can_write');
- const canExport =
- hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT);
+ const canExport = hasPerm('can_export');
const handleSavedQueryPreview = useCallback(
(id: number) => {
@@ -204,7 +203,7 @@ function SavedQueryList({
buttonStyle: 'primary',
});
- if (canCreate && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
+ if (canCreate) {
subMenuButtons.push({
name: (
diff --git a/superset/cli/importexport.py b/superset/cli/importexport.py
--- a/superset/cli/importexport.py
+++ b/superset/cli/importexport.py
-if feature_flags.get("VERSIONED_EXPORT"):
+@click.command()
+@with_appcontext
+@click.option(
+ "--dashboard-file",
+ "-f",
+ help="Specify the file to export to",
+)
+def export_dashboards(dashboard_file: Optional[str] = None) -> None:
+ """Export dashboards to ZIP file"""
+ # pylint: disable=import-outside-toplevel
+ from superset.commands.dashboard.export import ExportDashboardsCommand
+ from superset.models.dashboard import Dashboard
+
+ g.user = security_manager.find_user(username="admin")
+
+ dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
+ timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+ root = f"dashboard_export_{timestamp}"
+ dashboard_file = dashboard_file or f"{root}.zip"
+
+ try:
+ with ZipFile(dashboard_file, "w") as bundle:
+ for file_name, file_content in ExportDashboardsCommand(dashboard_ids).run():
+ with bundle.open(f"{root}/{file_name}", "w") as fp:
+ fp.write(file_content.encode())
+ except Exception: # pylint: disable=broad-except
+ logger.exception(
+ "There was an error when exporting the dashboards, please check "
+ "the exception traceback in the log"
+ )
+ sys.exit(1)
- @click.command()
- @with_appcontext
- @click.option(
- "--dashboard-file",
- "-f",
- help="Specify the file to export to",
- )
- def export_dashboards(dashboard_file: Optional[str] = None) -> None:
- """Export dashboards to ZIP file"""
- # pylint: disable=import-outside-toplevel
- from superset.commands.dashboard.export import ExportDashboardsCommand
- from superset.models.dashboard import Dashboard
-
- g.user = security_manager.find_user(username="admin")
-
- dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()]
- timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
- root = f"dashboard_export_{timestamp}"
- dashboard_file = dashboard_file or f"{root}.zip"
-
- try:
- with ZipFile(dashboard_file, "w") as bundle:
- for file_name, file_content in ExportDashboardsCommand(
- dashboard_ids
- ).run():
- with bundle.open(f"{root}/{file_name}", "w") as fp:
- fp.write(file_content.encode())
- except Exception: # pylint: disable=broad-except
- logger.exception(
- "There was an error when exporting the dashboards, please check "
- "the exception traceback in the log"
- )
- sys.exit(1)
-
- @click.command()
- @with_appcontext
- @click.option(
- "--datasource-file",
- "-f",
- help="Specify the file to export to",
- )
- def export_datasources(datasource_file: Optional[str] = None) -> None:
- """Export datasources to ZIP file"""
- # pylint: disable=import-outside-toplevel
- from superset.commands.dataset.export import ExportDatasetsCommand
- from superset.connectors.sqla.models import SqlaTable
-
- g.user = security_manager.find_user(username="admin")
-
- dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
- timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
- root = f"dataset_export_{timestamp}"
- datasource_file = datasource_file or f"{root}.zip"
-
- try:
- with ZipFile(datasource_file, "w") as bundle:
- for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
- with bundle.open(f"{root}/{file_name}", "w") as fp:
- fp.write(file_content.encode())
- except Exception: # pylint: disable=broad-except
- logger.exception(
- "There was an error when exporting the datasets, please check "
- "the exception traceback in the log"
- )
- sys.exit(1)
-
- @click.command()
- @with_appcontext
- @click.option(
- "--path",
- "-p",
- help="Path to a single ZIP file",
- )
- @click.option(
- "--username",
- "-u",
- default=None,
- help="Specify the user name to assign dashboards to",
- )
- def import_dashboards(path: str, username: Optional[str]) -> None:
- """Import dashboards from ZIP file"""
- # pylint: disable=import-outside-toplevel
- from superset.commands.dashboard.importers.dispatcher import (
- ImportDashboardsCommand,
+
+@click.command()
+@with_appcontext
+@click.option(
+ "--datasource-file",
+ "-f",
+ help="Specify the file to export to",
+)
+def export_datasources(datasource_file: Optional[str] = None) -> None:
+ """Export datasources to ZIP file"""
+ # pylint: disable=import-outside-toplevel
+ from superset.commands.dataset.export import ExportDatasetsCommand
+ from superset.connectors.sqla.models import SqlaTable
+
+ g.user = security_manager.find_user(username="admin")
+
+ dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()]
+ timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+ root = f"dataset_export_{timestamp}"
+ datasource_file = datasource_file or f"{root}.zip"
+
+ try:
+ with ZipFile(datasource_file, "w") as bundle:
+ for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
+ with bundle.open(f"{root}/{file_name}", "w") as fp:
+ fp.write(file_content.encode())
+ except Exception: # pylint: disable=broad-except
+ logger.exception(
+ "There was an error when exporting the datasets, please check "
+ "the exception traceback in the log"
)
- from superset.commands.importers.v1.utils import get_contents_from_bundle
-
- if username is not None:
- g.user = security_manager.find_user(username=username)
- if is_zipfile(path):
- with ZipFile(path) as bundle:
- contents = get_contents_from_bundle(bundle)
- else:
- with open(path) as file:
- contents = {path: file.read()}
- try:
- ImportDashboardsCommand(contents, overwrite=True).run()
- except Exception: # pylint: disable=broad-except
- logger.exception(
- "There was an error when importing the dashboards(s), please check "
- "the exception traceback in the log"
- )
- sys.exit(1)
-
- @click.command()
- @with_appcontext
- @click.option(
- "--path",
- "-p",
- help="Path to a single ZIP file",
- )
- def import_datasources(path: str) -> None:
- """Import datasources from ZIP file"""
- # pylint: disable=import-outside-toplevel
- from superset.commands.dataset.importers.dispatcher import ImportDatasetsCommand
- from superset.commands.importers.v1.utils import get_contents_from_bundle
-
- if is_zipfile(path):
- with ZipFile(path) as bundle:
- contents = get_contents_from_bundle(bundle)
- else:
- with open(path) as file:
- contents = {path: file.read()}
- try:
- ImportDatasetsCommand(contents, overwrite=True).run()
- except Exception: # pylint: disable=broad-except
- logger.exception(
- "There was an error when importing the dataset(s), please check the "
- "exception traceback in the log"
- )
- sys.exit(1)
-
-else:
-
- @click.command()
- @with_appcontext
- @click.option(
- "--dashboard-file",
- "-f",
- default=None,
- help="Specify the file to export to",
- )
- @click.option(
- "--print_stdout",
- "-p",
- is_flag=True,
- default=False,
- help="Print JSON to stdout",
- )
- def export_dashboards(
- dashboard_file: Optional[str], print_stdout: bool = False
- ) -> None:
- """Export dashboards to JSON"""
- # pylint: disable=import-outside-toplevel
- from superset.utils import dashboard_import_export
-
- data = dashboard_import_export.export_dashboards(db.session)
- if print_stdout or not dashboard_file:
- print(data)
- if dashboard_file:
- logger.info("Exporting dashboards to %s", dashboard_file)
- with open(dashboard_file, "w") as data_stream:
- data_stream.write(data)
-
- @click.command()
- @with_appcontext
- @click.option(
- "--datasource-file",
- "-f",
- default=None,
- help="Specify the file to export to",
- )
- @click.option(
- "--print_stdout",
- "-p",
- is_flag=True,
- default=False,
- help="Print YAML to stdout",
- )
- @click.option(
- "--back-references",
- "-b",
- is_flag=True,
- default=False,
- help="Include parent back references",
- )
- @click.option(
- "--include-defaults",
- "-d",
- is_flag=True,
- default=False,
- help="Include fields containing defaults",
- )
- def export_datasources(
- datasource_file: Optional[str],
- print_stdout: bool = False,
- back_references: bool = False,
- include_defaults: bool = False,
- ) -> None:
- """Export datasources to YAML"""
- # pylint: disable=import-outside-toplevel
- from superset.utils import dict_import_export
-
- data = dict_import_export.export_to_dict(
- session=db.session,
- recursive=True,
- back_references=back_references,
- include_defaults=include_defaults,
+ sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+ "--path",
+ "-p",
+ help="Path to a single ZIP file",
+)
+@click.option(
+ "--username",
+ "-u",
+ default=None,
+ help="Specify the user name to assign dashboards to",
+)
+def import_dashboards(path: str, username: Optional[str]) -> None:
+ """Import dashboards from ZIP file"""
+ # pylint: disable=import-outside-toplevel
+ from superset.commands.dashboard.importers.dispatcher import ImportDashboardsCommand
+ from superset.commands.importers.v1.utils import get_contents_from_bundle
+
+ if username is not None:
+ g.user = security_manager.find_user(username=username)
+ if is_zipfile(path):
+ with ZipFile(path) as bundle:
+ contents = get_contents_from_bundle(bundle)
+ else:
+ with open(path) as file:
+ contents = {path: file.read()}
+ try:
+ ImportDashboardsCommand(contents, overwrite=True).run()
+ except Exception: # pylint: disable=broad-except
+ logger.exception(
+ "There was an error when importing the dashboards(s), please check "
+ "the exception traceback in the log"
)
- if print_stdout or not datasource_file:
- yaml.safe_dump(data, sys.stdout, default_flow_style=False)
- if datasource_file:
- logger.info("Exporting datasources to %s", datasource_file)
- with open(datasource_file, "w") as data_stream:
- yaml.safe_dump(data, data_stream, default_flow_style=False)
-
- @click.command()
- @with_appcontext
- @click.option(
- "--path",
- "-p",
- help="Path to a single JSON file or path containing multiple JSON "
- "files to import (*.json)",
- )
- @click.option(
- "--recursive",
- "-r",
- is_flag=True,
- default=False,
- help="recursively search the path for json files",
- )
- @click.option(
- "--username",
- "-u",
- default=None,
- help="Specify the user name to assign dashboards to",
- )
- def import_dashboards(path: str, recursive: bool, username: str) -> None:
- """Import dashboards from JSON file"""
- # pylint: disable=import-outside-toplevel
- from superset.commands.dashboard.importers.v0 import ImportDashboardsCommand
-
- path_object = Path(path)
- files: list[Path] = []
- if path_object.is_file():
- files.append(path_object)
- elif path_object.exists() and not recursive:
- files.extend(path_object.glob("*.json"))
- elif path_object.exists() and recursive:
- files.extend(path_object.rglob("*.json"))
- if username is not None:
- g.user = security_manager.find_user(username=username)
- contents = {}
- for path_ in files:
- with open(path_) as file:
- contents[path_.name] = file.read()
- try:
- ImportDashboardsCommand(contents).run()
- except Exception: # pylint: disable=broad-except
- logger.exception("Error when importing dashboard")
- sys.exit(1)
-
- @click.command()
- @with_appcontext
- @click.option(
- "--path",
- "-p",
- help="Path to a single YAML file or path containing multiple YAML "
- "files to import (*.yaml or *.yml)",
- )
- @click.option(
- "--sync",
- "-s",
- "sync",
- default="",
- help="comma separated list of element types to synchronize "
- 'e.g. "metrics,columns" deletes metrics and columns in the DB '
- "that are not specified in the YAML file",
- )
- @click.option(
- "--recursive",
- "-r",
- is_flag=True,
- default=False,
- help="recursively search the path for yaml files",
- )
- def import_datasources(path: str, sync: str, recursive: bool) -> None:
- """Import datasources from YAML"""
- # pylint: disable=import-outside-toplevel
- from superset.commands.dataset.importers.v0 import ImportDatasetsCommand
-
- sync_array = sync.split(",")
- sync_columns = "columns" in sync_array
- sync_metrics = "metrics" in sync_array
-
- path_object = Path(path)
- files: list[Path] = []
- if path_object.is_file():
- files.append(path_object)
- elif path_object.exists() and not recursive:
- files.extend(path_object.glob("*.yaml"))
- files.extend(path_object.glob("*.yml"))
- elif path_object.exists() and recursive:
- files.extend(path_object.rglob("*.yaml"))
- files.extend(path_object.rglob("*.yml"))
- contents = {}
- for path_ in files:
- with open(path_) as file:
- contents[path_.name] = file.read()
- try:
- ImportDatasetsCommand(
- contents, sync_columns=sync_columns, sync_metrics=sync_metrics
- ).run()
- except Exception: # pylint: disable=broad-except
- logger.exception("Error when importing dataset")
- sys.exit(1)
-
- @click.command()
- @with_appcontext
- @click.option(
- "--back-references",
- "-b",
- is_flag=True,
- default=False,
- help="Include parent back references",
- )
- def export_datasource_schema(back_references: bool) -> None:
- """Export datasource YAML schema to stdout"""
- # pylint: disable=import-outside-toplevel
- from superset.utils import dict_import_export
+ sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+ "--path",
+ "-p",
+ help="Path to a single ZIP file",
+)
+def import_datasources(path: str) -> None:
+ """Import datasources from ZIP file"""
+ # pylint: disable=import-outside-toplevel
+ from superset.commands.dataset.importers.dispatcher import ImportDatasetsCommand
+ from superset.commands.importers.v1.utils import get_contents_from_bundle
+
+ if is_zipfile(path):
+ with ZipFile(path) as bundle:
+ contents = get_contents_from_bundle(bundle)
+ else:
+ with open(path) as file:
+ contents = {path: file.read()}
+ try:
+ ImportDatasetsCommand(contents, overwrite=True).run()
+ except Exception: # pylint: disable=broad-except
+ logger.exception(
+ "There was an error when importing the dataset(s), please check the "
+ "exception traceback in the log"
+ )
+ sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+ "--dashboard-file",
+ "-f",
+ default=None,
+ help="Specify the file to export to",
+)
+@click.option(
+ "--print_stdout",
+ "-p",
+ is_flag=True,
+ default=False,
+ help="Print JSON to stdout",
+)
+def legacy_export_dashboards(
+ dashboard_file: Optional[str], print_stdout: bool = False
+) -> None:
+ """Export dashboards to JSON"""
+ # pylint: disable=import-outside-toplevel
+ from superset.utils import dashboard_import_export
+
+ data = dashboard_import_export.export_dashboards(db.session)
+ if print_stdout or not dashboard_file:
+ print(data)
+ if dashboard_file:
+ logger.info("Exporting dashboards to %s", dashboard_file)
+ with open(dashboard_file, "w") as data_stream:
+ data_stream.write(data)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+ "--datasource-file",
+ "-f",
+ default=None,
+ help="Specify the file to export to",
+)
+@click.option(
+ "--print_stdout",
+ "-p",
+ is_flag=True,
+ default=False,
+ help="Print YAML to stdout",
+)
+@click.option(
+ "--back-references",
+ "-b",
+ is_flag=True,
+ default=False,
+ help="Include parent back references",
+)
+@click.option(
+ "--include-defaults",
+ "-d",
+ is_flag=True,
+ default=False,
+ help="Include fields containing defaults",
+)
+def legacy_export_datasources(
+ datasource_file: Optional[str],
+ print_stdout: bool = False,
+ back_references: bool = False,
+ include_defaults: bool = False,
+) -> None:
+ """Export datasources to YAML"""
+ # pylint: disable=import-outside-toplevel
+ from superset.utils import dict_import_export
- data = dict_import_export.export_schema_to_dict(back_references=back_references)
+ data = dict_import_export.export_to_dict(
+ session=db.session,
+ recursive=True,
+ back_references=back_references,
+ include_defaults=include_defaults,
+ )
+ if print_stdout or not datasource_file:
yaml.safe_dump(data, sys.stdout, default_flow_style=False)
+ if datasource_file:
+ logger.info("Exporting datasources to %s", datasource_file)
+ with open(datasource_file, "w") as data_stream:
+ yaml.safe_dump(data, data_stream, default_flow_style=False)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+ "--path",
+ "-p",
+ help="Path to a single JSON file or path containing multiple JSON "
+ "files to import (*.json)",
+)
+@click.option(
+ "--recursive",
+ "-r",
+ is_flag=True,
+ default=False,
+ help="recursively search the path for json files",
+)
+@click.option(
+ "--username",
+ "-u",
+ default=None,
+ help="Specify the user name to assign dashboards to",
+)
+def legacy_import_dashboards(path: str, recursive: bool, username: str) -> None:
+ """Import dashboards from JSON file"""
+ # pylint: disable=import-outside-toplevel
+ from superset.commands.dashboard.importers.v0 import ImportDashboardsCommand
+
+ path_object = Path(path)
+ files: list[Path] = []
+ if path_object.is_file():
+ files.append(path_object)
+ elif path_object.exists() and not recursive:
+ files.extend(path_object.glob("*.json"))
+ elif path_object.exists() and recursive:
+ files.extend(path_object.rglob("*.json"))
+ if username is not None:
+ g.user = security_manager.find_user(username=username)
+ contents = {}
+ for path_ in files:
+ with open(path_) as file:
+ contents[path_.name] = file.read()
+ try:
+ ImportDashboardsCommand(contents).run()
+ except Exception: # pylint: disable=broad-except
+ logger.exception("Error when importing dashboard")
+ sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+ "--path",
+ "-p",
+ help="Path to a single YAML file or path containing multiple YAML "
+ "files to import (*.yaml or *.yml)",
+)
+@click.option(
+ "--sync",
+ "-s",
+ "sync",
+ default="",
+ help="comma separated list of element types to synchronize "
+ 'e.g. "metrics,columns" deletes metrics and columns in the DB '
+ "that are not specified in the YAML file",
+)
+@click.option(
+ "--recursive",
+ "-r",
+ is_flag=True,
+ default=False,
+ help="recursively search the path for yaml files",
+)
+def legacy_import_datasources(path: str, sync: str, recursive: bool) -> None:
+ """Import datasources from YAML"""
+ # pylint: disable=import-outside-toplevel
+ from superset.commands.dataset.importers.v0 import ImportDatasetsCommand
+
+ sync_array = sync.split(",")
+ sync_columns = "columns" in sync_array
+ sync_metrics = "metrics" in sync_array
+
+ path_object = Path(path)
+ files: list[Path] = []
+ if path_object.is_file():
+ files.append(path_object)
+ elif path_object.exists() and not recursive:
+ files.extend(path_object.glob("*.yaml"))
+ files.extend(path_object.glob("*.yml"))
+ elif path_object.exists() and recursive:
+ files.extend(path_object.rglob("*.yaml"))
+ files.extend(path_object.rglob("*.yml"))
+ contents = {}
+ for path_ in files:
+ with open(path_) as file:
+ contents[path_.name] = file.read()
+ try:
+ ImportDatasetsCommand(
+ contents, sync_columns=sync_columns, sync_metrics=sync_metrics
+ ).run()
+ except Exception: # pylint: disable=broad-except
+ logger.exception("Error when importing dataset")
+ sys.exit(1)
+
+
+@click.command()
+@with_appcontext
+@click.option(
+ "--back-references",
+ "-b",
+ is_flag=True,
+ default=False,
+ help="Include parent back references",
+)
+def legacy_export_datasource_schema(back_references: bool) -> None:
+ """Export datasource YAML schema to stdout"""
+ # pylint: disable=import-outside-toplevel
+ from superset.utils import dict_import_export
+
+ data = dict_import_export.export_schema_to_dict(back_references=back_references)
+ yaml.safe_dump(data, sys.stdout, default_flow_style=False)
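The CLI commands above (and the REST endpoints further down) now share one bundling shape: an export command yields `(file_name, file_content)` pairs that are written into a timestamped ZIP. A standalone sketch of that pattern, with a hypothetical `export_files()` generator standing in for `ExportDashboardsCommand(dashboard_ids).run()`:

```python
from datetime import datetime
from typing import Iterator, Tuple
from zipfile import ZipFile


def export_files() -> Iterator[Tuple[str, str]]:
    """Hypothetical stand-in for ExportDashboardsCommand(...).run(); contents are illustrative."""
    yield "metadata.yaml", "version: 1.0.0\n"
    yield "dashboards/example.yaml", "slug: example\n"


timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
root = f"dashboard_export_{timestamp}"
with ZipFile(f"{root}.zip", "w") as bundle:
    for file_name, file_content in export_files():
        # every entry is nested under the timestamped root folder
        with bundle.open(f"{root}/{file_name}", "w") as fp:
            fp.write(file_content.encode())
```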
diff --git a/superset/config.py b/superset/config.py
index 474cb0b99b880..4d0eda4b2f048 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -432,7 +432,6 @@ class D3Format(TypedDict, total=False):
"DASHBOARD_CROSS_FILTERS": True, # deprecated
"DASHBOARD_VIRTUALIZATION": True,
"GLOBAL_ASYNC_QUERIES": False,
- "VERSIONED_EXPORT": True, # deprecated
"EMBEDDED_SUPERSET": False,
# Enables Alerts and reports new implementation
"ALERT_REPORTS": False,
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index 783543e45de8b..e719a27fc743b 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -23,7 +23,7 @@
from typing import Any, Callable, cast, Optional
from zipfile import is_zipfile, ZipFile
-from flask import make_response, redirect, request, Response, send_file, url_for
+from flask import redirect, request, Response, send_file, url_for
from flask_appbuilder import permission_name
from flask_appbuilder.api import expose, protect, rison, safe
from flask_appbuilder.hooks import before_request
@@ -85,7 +85,6 @@
from superset.tasks.utils import get_current_user
from superset.utils.screenshots import DashboardScreenshot
from superset.utils.urls import get_url_path
-from superset.views.base import generate_download_headers
from superset.views.base_api import (
BaseSupersetModelRestApi,
RelatedFieldFilter,
@@ -714,7 +713,7 @@ def bulk_delete(self, **kwargs: Any) -> Response:
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.export",
log_to_statsd=False,
)
- def export(self, **kwargs: Any) -> Response: # pylint: disable=too-many-locals
+ def export(self, **kwargs: Any) -> Response:
"""Download multiple dashboards as YAML files.
---
get:
@@ -745,50 +744,32 @@ def export(self, **kwargs: Any) -> Response: # pylint: disable=too-many-locals
$ref: '#/components/responses/500'
"""
requested_ids = kwargs["rison"]
- token = request.args.get("token")
- if is_feature_enabled("VERSIONED_EXPORT"):
- timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
- root = f"dashboard_export_{timestamp}"
- filename = f"{root}.zip"
+ timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+ root = f"dashboard_export_{timestamp}"
+ filename = f"{root}.zip"
- buf = BytesIO()
- with ZipFile(buf, "w") as bundle:
- try:
- for file_name, file_content in ExportDashboardsCommand(
- requested_ids
- ).run():
- with bundle.open(f"{root}/{file_name}", "w") as fp:
- fp.write(file_content.encode())
- except DashboardNotFoundError:
- return self.response_404()
- buf.seek(0)
+ buf = BytesIO()
+ with ZipFile(buf, "w") as bundle:
+ try:
+ for file_name, file_content in ExportDashboardsCommand(
+ requested_ids
+ ).run():
+ with bundle.open(f"{root}/{file_name}", "w") as fp:
+ fp.write(file_content.encode())
+ except DashboardNotFoundError:
+ return self.response_404()
+ buf.seek(0)
- response = send_file(
- buf,
- mimetype="application/zip",
- as_attachment=True,
- download_name=filename,
- )
- if token:
- response.set_cookie(token, "done", max_age=600)
- return response
-
- query = self.datamodel.session.query(Dashboard).filter(
- Dashboard.id.in_(requested_ids)
+ response = send_file(
+ buf,
+ mimetype="application/zip",
+ as_attachment=True,
+ download_name=filename,
)
- query = self._base_filters.apply_all(query)
- ids = {item.id for item in query.all()}
- if not ids:
- return self.response_404()
- export = Dashboard.export_dashboards(ids)
- resp = make_response(export, 200)
- resp.headers["Content-Disposition"] = generate_download_headers("json")[
- "Content-Disposition"
- ]
- if token:
- resp.set_cookie(token, "done", max_age=600)
- return resp
+ if token := request.args.get("token"):
+ response.set_cookie(token, "done", max_age=600)
+ return response
@expose("//thumbnail//", methods=("GET",))
@protect()
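The optional `token` query parameter keeps its old role: the client passes `?token=<id>` and watches for a cookie with that name to know the download is ready. A minimal Flask sketch of the pattern the endpoint now uses (the route, payload, and file name are placeholders):

```python
from io import BytesIO

from flask import Flask, Response, request, send_file

app = Flask(__name__)


@app.route("/export/")  # placeholder route, for illustration only
def export() -> Response:
    buf = BytesIO(b"...")  # stand-in for the in-memory ZIP built above
    response = send_file(
        buf,
        mimetype="application/zip",
        as_attachment=True,
        download_name="export.zip",
    )
    # if the client supplied ?token=<id>, set a cookie with that name so the
    # UI can detect that the download has been prepared
    if token := request.args.get("token"):
        response.set_cookie(token, "done", max_age=600)
    return response
```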
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index 809074e10ddb2..8cc1b2df6301d 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -22,14 +22,13 @@
from typing import Any
from zipfile import is_zipfile, ZipFile
-import yaml
from flask import request, Response, send_file
from flask_appbuilder.api import expose, protect, rison, safe
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import ngettext
from marshmallow import ValidationError
-from superset import event_logger, is_feature_enabled
+from superset import event_logger
from superset.commands.dataset.create import CreateDatasetCommand
from superset.commands.dataset.delete import DeleteDatasetCommand
from superset.commands.dataset.duplicate import DuplicateDatasetCommand
@@ -68,7 +67,7 @@
openapi_spec_methods_override,
)
from superset.utils.core import parse_boolean_string
-from superset.views.base import DatasourceFilter, generate_download_headers
+from superset.views.base import DatasourceFilter
from superset.views.base_api import (
BaseSupersetModelRestApi,
RelatedFieldFilter,
@@ -489,7 +488,7 @@ def delete(self, pk: int) -> Response:
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.export",
log_to_statsd=False,
)
- def export(self, **kwargs: Any) -> Response: # pylint: disable=too-many-locals
+ def export(self, **kwargs: Any) -> Response:
"""Download multiple datasets as YAML files.
---
get:
@@ -519,49 +518,31 @@ def export(self, **kwargs: Any) -> Response: # pylint: disable=too-many-locals
"""
requested_ids = kwargs["rison"]
- if is_feature_enabled("VERSIONED_EXPORT"):
- token = request.args.get("token")
- timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
- root = f"dataset_export_{timestamp}"
- filename = f"{root}.zip"
+ timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+ root = f"dataset_export_{timestamp}"
+ filename = f"{root}.zip"
- buf = BytesIO()
- with ZipFile(buf, "w") as bundle:
- try:
- for file_name, file_content in ExportDatasetsCommand(
- requested_ids
- ).run():
- with bundle.open(f"{root}/{file_name}", "w") as fp:
- fp.write(file_content.encode())
- except DatasetNotFoundError:
- return self.response_404()
- buf.seek(0)
+ buf = BytesIO()
+ with ZipFile(buf, "w") as bundle:
+ try:
+ for file_name, file_content in ExportDatasetsCommand(
+ requested_ids
+ ).run():
+ with bundle.open(f"{root}/{file_name}", "w") as fp:
+ fp.write(file_content.encode())
+ except DatasetNotFoundError:
+ return self.response_404()
+ buf.seek(0)
- response = send_file(
- buf,
- mimetype="application/zip",
- as_attachment=True,
- download_name=filename,
- )
- if token:
- response.set_cookie(token, "done", max_age=600)
- return response
-
- query = self.datamodel.session.query(SqlaTable).filter(
- SqlaTable.id.in_(requested_ids)
- )
- query = self._base_filters.apply_all(query)
- items = query.all()
- ids = [item.id for item in items]
- if len(ids) != len(requested_ids):
- return self.response_404()
-
- data = [t.export_to_dict() for t in items]
- return Response(
- yaml.safe_dump(data),
- headers=generate_download_headers("yaml"),
- mimetype="application/text",
+ response = send_file(
+ buf,
+ mimetype="application/zip",
+ as_attachment=True,
+ download_name=filename,
)
+ if token := request.args.get("token"):
+ response.set_cookie(token, "done", max_age=600)
+ return response
@expose("/duplicate", methods=("POST",))
@protect()
diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py
index 807f430ee44ab..2e5f5a716a6f8 100644
--- a/superset/initialization/__init__.py
+++ b/superset/initialization/__init__.py
@@ -118,7 +118,6 @@ def init_views(self) -> None:
# the global Flask app
#
# pylint: disable=import-outside-toplevel,too-many-locals,too-many-statements
- from superset import security_manager
from superset.advanced_data_type.api import AdvancedDataTypeRestApi
from superset.annotation_layers.annotations.api import AnnotationRestApi
from superset.annotation_layers.api import AnnotationLayerRestApi
@@ -327,20 +326,6 @@ def init_views(self) -> None:
#
# Add links
#
- appbuilder.add_link(
- "Import Dashboards",
- label=__("Import Dashboards"),
- href="/superset/import_dashboards/",
- icon="fa-cloud-upload",
- category="Manage",
- category_label=__("Manage"),
- category_icon="fa-wrench",
- cond=lambda: (
- security_manager.can_access("can_import_dashboards", "Superset")
- and not feature_flag_manager.is_feature_enabled("VERSIONED_EXPORT")
- ),
- )
-
appbuilder.add_link(
"SQL Editor",
label=__("SQL Lab"),
diff --git a/superset/views/core.py b/superset/views/core.py
index 330d517a37d4c..307cd08c6414e 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -47,7 +47,6 @@
from superset.async_events.async_query_manager import AsyncQueryTokenException
from superset.commands.chart.exceptions import ChartNotFoundError
from superset.commands.chart.warm_up_cache import ChartWarmUpCacheCommand
-from superset.commands.dashboard.importers.v0 import ImportDashboardsCommand
from superset.commands.dashboard.permalink.get import GetDashboardPermalinkCommand
from superset.commands.dataset.exceptions import DatasetNotFoundError
from superset.commands.explore.form_data.create import CreateFormDataCommand
@@ -61,7 +60,6 @@
from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError
from superset.exceptions import (
CacheLoadError,
- DatabaseNotFound,
SupersetException,
SupersetSecurityException,
)
@@ -345,55 +343,6 @@ def explore_json(
except SupersetException as ex:
return json_error_response(utils.error_msg_from_exception(ex), 400)
- @has_access
- @event_logger.log_this
- @expose(
- "/import_dashboards/",
- methods=(
- "GET",
- "POST",
- ),
- )
- def import_dashboards(self) -> FlaskResponse:
- """Overrides the dashboards using json instances from the file."""
- import_file = request.files.get("file")
- if request.method == "POST" and import_file:
- success = False
- database_id = request.form.get("db_id")
- try:
- ImportDashboardsCommand(
- {import_file.filename: import_file.read()}, database_id
- ).run()
- success = True
- except DatabaseNotFound as ex:
- logger.exception(ex)
- flash(
- _(
- "Cannot import dashboard: %(db_error)s.\n"
- "Make sure to create the database before "
- "importing the dashboard.",
- db_error=ex,
- ),
- "danger",
- )
- except Exception as ex: # pylint: disable=broad-except
- logger.exception(ex)
- flash(
- _(
- "An unknown error occurred. "
- "Please contact your Superset administrator"
- ),
- "danger",
- )
- if success:
- flash("Dashboard(s) have been imported", "success")
- return redirect("/dashboard/list/")
-
- databases = db.session.query(Database).all()
- return self.render_template(
- "superset/import_dashboards.html", databases=databases
- )
-
@staticmethod
def get_redirect_url() -> str:
"""Assembles the redirect URL to the new endpoint. It also replaces
diff --git a/tests/integration_tests/cli_tests.py b/tests/integration_tests/cli_tests.py
index 55557ab32deac..2441809b0da64 100644
--- a/tests/integration_tests/cli_tests.py
+++ b/tests/integration_tests/cli_tests.py
@@ -16,7 +16,6 @@
# under the License.
import importlib
-import json
import logging
from pathlib import Path
from unittest import mock
@@ -49,69 +48,7 @@ def assert_cli_fails_properly(response, caplog):
assert caplog.records[-1].levelname == "ERROR"
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-def test_export_dashboards_original(app_context, fs):
- """
- Test that a JSON file is exported.
- """
- # pylint: disable=reimported, redefined-outer-name
- import superset.cli.importexport # noqa: F811
-
- # reload to define export_dashboards correctly based on the
- # feature flags
- importlib.reload(superset.cli.importexport)
-
- runner = app.test_cli_runner()
- response = runner.invoke(
- superset.cli.importexport.export_dashboards, ("-f", "dashboards.json")
- )
-
- assert response.exit_code == 0
- assert Path("dashboards.json").exists()
-
- # check that file is valid JSON
- with open("dashboards.json") as fp:
- contents = fp.read()
- json.loads(contents)
-
-
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-def test_export_datasources_original(app_context, fs):
- """
- Test that a YAML file is exported.
- """
- # pylint: disable=reimported, redefined-outer-name
- import superset.cli.importexport # noqa: F811
-
- # reload to define export_dashboards correctly based on the
- # feature flags
- importlib.reload(superset.cli.importexport)
-
- runner = app.test_cli_runner()
- response = runner.invoke(
- superset.cli.importexport.export_datasources, ("-f", "datasources.yaml")
- )
-
- assert response.exit_code == 0
-
- assert Path("datasources.yaml").exists()
-
- # check that file is valid JSON
- with open("datasources.yaml") as fp:
- contents = fp.read()
- yaml.safe_load(contents)
-
-
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
def test_export_dashboards_versioned_export(app_context, fs):
"""
Test that a ZIP file is exported.
@@ -133,9 +70,6 @@ def test_export_dashboards_versioned_export(app_context, fs):
assert is_zipfile("dashboard_export_20210101T000000.zip")
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
@mock.patch(
"superset.commands.dashboard.export.ExportDashboardsCommand.run",
side_effect=Exception(),
@@ -163,9 +97,6 @@ def test_failing_export_dashboards_versioned_export(
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
def test_export_datasources_versioned_export(app_context, fs):
"""
Test that a ZIP file is exported.
@@ -187,9 +118,6 @@ def test_export_datasources_versioned_export(app_context, fs):
assert is_zipfile("dataset_export_20210101T000000.zip")
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
@mock.patch(
"superset.commands.dashboard.export.ExportDatasetsCommand.run",
side_effect=Exception(),
@@ -214,9 +142,6 @@ def test_failing_export_datasources_versioned_export(
assert_cli_fails_properly(response, caplog)
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
@mock.patch("superset.commands.dashboard.importers.dispatcher.ImportDashboardsCommand")
def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs):
"""
@@ -257,9 +182,6 @@ def test_import_dashboards_versioned_export(import_dashboards_command, app_conte
import_dashboards_command.assert_called_with(expected_contents, overwrite=True)
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
@mock.patch(
"superset.commands.dashboard.importers.dispatcher.ImportDashboardsCommand.run",
side_effect=Exception(),
@@ -301,9 +223,6 @@ def test_failing_import_dashboards_versioned_export(
assert_cli_fails_properly(response, caplog)
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
@mock.patch("superset.commands.dataset.importers.dispatcher.ImportDatasetsCommand")
def test_import_datasets_versioned_export(import_datasets_command, app_context, fs):
"""
@@ -344,120 +263,6 @@ def test_import_datasets_versioned_export(import_datasets_command, app_context,
import_datasets_command.assert_called_with(expected_contents, overwrite=True)
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@mock.patch("superset.commands.dataset.importers.v0.ImportDatasetsCommand")
-def test_import_datasets_sync_argument_columns_metrics(
- import_datasets_command, app_context, fs
-):
- """
- Test that the --sync command line argument syncs dataset in superset
- with YAML file. Using both columns and metrics with the --sync flag
- """
- # pylint: disable=reimported, redefined-outer-name
- import superset.cli.importexport # noqa: F811
-
- # reload to define export_datasets correctly based on the
- # feature flags
- importlib.reload(superset.cli.importexport)
-
- # write YAML file
- with open("dataset.yaml", "w") as fp:
- fp.write("hello: world")
-
- runner = app.test_cli_runner()
- response = runner.invoke(
- superset.cli.importexport.import_datasources,
- ["-p", "dataset.yaml", "-s", "metrics,columns"],
- )
-
- assert response.exit_code == 0
- expected_contents = {"dataset.yaml": "hello: world"}
- import_datasets_command.assert_called_with(
- expected_contents,
- sync_columns=True,
- sync_metrics=True,
- )
-
-
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@mock.patch("superset.commands.dataset.importers.v0.ImportDatasetsCommand")
-def test_import_datasets_sync_argument_columns(
- import_datasets_command, app_context, fs
-):
- """
- Test that the --sync command line argument syncs dataset in superset
- with YAML file. Using only columns with the --sync flag
- """
- # pylint: disable=reimported, redefined-outer-name
- import superset.cli.importexport # noqa: F811
-
- # reload to define export_datasets correctly based on the
- # feature flags
- importlib.reload(superset.cli.importexport)
-
- # write YAML file
- with open("dataset.yaml", "w") as fp:
- fp.write("hello: world")
-
- runner = app.test_cli_runner()
- response = runner.invoke(
- superset.cli.importexport.import_datasources,
- ["-p", "dataset.yaml", "-s", "columns"],
- )
-
- assert response.exit_code == 0
- expected_contents = {"dataset.yaml": "hello: world"}
- import_datasets_command.assert_called_with(
- expected_contents,
- sync_columns=True,
- sync_metrics=False,
- )
-
-
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
-)
-@mock.patch("superset.commands.dataset.importers.v0.ImportDatasetsCommand")
-def test_import_datasets_sync_argument_metrics(
- import_datasets_command, app_context, fs
-):
- """
- Test that the --sync command line argument syncs dataset in superset
- with YAML file. Using only metrics with the --sync flag
- """
- # pylint: disable=reimported, redefined-outer-name
- import superset.cli.importexport # noqa: F811
-
- # reload to define export_datasets correctly based on the
- # feature flags
- importlib.reload(superset.cli.importexport)
-
- # write YAML file
- with open("dataset.yaml", "w") as fp:
- fp.write("hello: world")
-
- runner = app.test_cli_runner()
- response = runner.invoke(
- superset.cli.importexport.import_datasources,
- ["-p", "dataset.yaml", "-s", "metrics"],
- )
-
- assert response.exit_code == 0
- expected_contents = {"dataset.yaml": "hello: world"}
- import_datasets_command.assert_called_with(
- expected_contents,
- sync_columns=False,
- sync_metrics=True,
- )
-
-
-@mock.patch.dict(
- "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": True}, clear=True
-)
@mock.patch(
"superset.commands.dataset.importers.dispatcher.ImportDatasetsCommand.run",
side_effect=Exception(),
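The surviving tests drive the CLI through Flask's test runner and only assert on the ZIP output. A minimal sketch of that invocation pattern (fixture names and the output file name follow the existing tests; the `from superset import app` import is an assumption based on the surrounding suite):

```python
from zipfile import is_zipfile

import superset.cli.importexport
from superset import app  # the test suite imports the Flask app this way


def test_export_dashboards_zip(app_context, fs):  # fixtures from the suite's conftest
    runner = app.test_cli_runner()
    response = runner.invoke(
        superset.cli.importexport.export_dashboards, ("-f", "dashboards.zip")
    )
    assert response.exit_code == 0
    assert is_zipfile("dashboards.zip")
```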
diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py
index 6b435da997fa8..d809880bf7df5 100644
--- a/tests/integration_tests/dashboards/api_tests.py
+++ b/tests/integration_tests/dashboards/api_tests.py
@@ -36,7 +36,6 @@
from superset.reports.models import ReportSchedule, ReportScheduleType
from superset.models.slice import Slice
from superset.utils.core import backend, override_user
-from superset.views.base import generate_download_headers
from tests.integration_tests.conftest import with_feature_flags
from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin
@@ -1652,11 +1651,6 @@ def test_update_dashboard_not_owned(self):
db.session.delete(user_alpha2)
db.session.commit()
- @patch.dict(
- "superset.extensions.feature_flag_manager._feature_flags",
- {"VERSIONED_EXPORT": False},
- clear=True,
- )
@pytest.mark.usefixtures(
"load_world_bank_dashboard_with_slices",
"load_birth_names_dashboard_with_slices",
@@ -1671,8 +1665,8 @@ def test_export(self):
uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboards_ids)}"
rv = self.get_assert_metric(uri, "export")
- headers = generate_download_headers("json")["Content-Disposition"]
+ headers = f"attachment; filename=dashboard_export_20220101T000000.zip"
assert rv.status_code == 200
assert rv.headers["Content-Disposition"] == headers
diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py
index c9d2a41057ba3..395aebf29c61f 100644
--- a/tests/integration_tests/security_tests.py
+++ b/tests/integration_tests/security_tests.py
@@ -1348,7 +1348,6 @@ def assert_can_alpha(self, perm_set):
self.assert_can_all("CssTemplate", perm_set)
self.assert_can_all("Dataset", perm_set)
self.assert_can_read("Database", perm_set)
- self.assertIn(("can_import_dashboards", "Superset"), perm_set)
self.assertIn(("can_this_form_post", "CsvToDatabaseView"), perm_set)
self.assertIn(("can_this_form_get", "CsvToDatabaseView"), perm_set)
self.assert_can_menu("Manage", perm_set)