diff --git a/superset/cli.py b/superset/cli.py
index 0382054daa27b..c97bbb6b42fdd 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -168,7 +168,7 @@ def load_examples_run(
         examples.load_big_data()
 
     # load examples that are stored as YAML config files
-    examples.load_from_configs(force, load_test_data)
+    examples.load_examples_from_configs(force, load_test_data)
 
 
 @with_appcontext
@@ -187,10 +187,28 @@ def load_examples(
     only_metadata: bool = False,
     force: bool = False,
 ) -> None:
-    """Loads a set of Slices and Dashboards and a supporting dataset """
+    """Loads a set of Slices and Dashboards and a supporting dataset"""
     load_examples_run(load_test_data, load_big_data, only_metadata, force)
 
 
+@with_appcontext
+@superset.command()
+@click.argument("directory")
+@click.option(
+    "--overwrite", "-o", is_flag=True, help="Overwriting existing metadata definitions"
+)
+@click.option(
+    "--force", "-f", is_flag=True, help="Force load data even if table already exists"
+)
+def import_directory(directory: str, overwrite: bool, force: bool) -> None:
+    """Imports configs from a given directory"""
+    from superset.examples.utils import load_configs_from_directory
+
+    load_configs_from_directory(
+        root=Path(directory), overwrite=overwrite, force_data=force,
+    )
+
+
 @with_appcontext
 @superset.command()
 @click.option("--database_name", "-d", help="Database name to change")
diff --git a/superset/datasets/commands/importers/v1/utils.py b/superset/datasets/commands/importers/v1/utils.py
index 6b4dbeb7b8e74..d61be518a25eb 100644
--- a/superset/datasets/commands/importers/v1/utils.py
+++ b/superset/datasets/commands/importers/v1/utils.py
@@ -119,10 +119,12 @@ def import_dataset(
     example_database = get_example_database()
     try:
         table_exists = example_database.has_table_by_name(dataset.table_name)
-    except Exception as ex:
+    except Exception:  # pylint: disable=broad-except
         # MySQL doesn't play nice with GSheets table names
-        logger.warning("Couldn't check if table %s exists, stopping import")
-        raise ex
+        logger.warning(
+            "Couldn't check if table %s exists, assuming it does", dataset.table_name
+        )
+        table_exists = True
 
     if data_uri and (not table_exists or force_data):
         load_data(data_uri, dataset, example_database, session)
diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py
index a857efec8f634..a73adb64f9e3c 100644
--- a/superset/datasets/schemas.py
+++ b/superset/datasets/schemas.py
@@ -167,5 +167,6 @@ class ImportV1DatasetSchema(Schema):
     columns = fields.List(fields.Nested(ImportV1ColumnSchema))
     metrics = fields.List(fields.Nested(ImportV1MetricSchema))
     version = fields.String(required=True)
-    database_uuid = fields.UUID(required=True)
+    # TODO (betodealmeida): disallow None when we have all imports being done by configs
+    database_uuid = fields.UUID(required=True, allow_none=True)
     data = fields.URL()
diff --git a/superset/examples/__init__.py b/superset/examples/__init__.py
index 161a52f4b4d19..a7742b0ef7dff 100644
--- a/superset/examples/__init__.py
+++ b/superset/examples/__init__.py
@@ -30,5 +30,5 @@
 from .random_time_series import load_random_time_series_data
 from .sf_population_polygons import load_sf_population_polygons
 from .tabbed_dashboard import load_tabbed_dashboard
-from .utils import load_from_configs
+from .utils import load_examples_from_configs
 from .world_bank import load_world_bank_health_n_pop
diff --git a/superset/examples/utils.py b/superset/examples/utils.py
index 66ca811df2d35..4fc3feb1b59d5 100644
--- a/superset/examples/utils.py
+++ b/superset/examples/utils.py
@@ -18,14 +18,21 @@
 from pathlib import Path
 from typing import Any, Dict
 
+import yaml
 from pkg_resources import resource_isdir, resource_listdir, resource_stream
 
 from superset.commands.importers.v1.examples import ImportExamplesCommand
+from superset.commands.importers.v1.utils import METADATA_FILE_NAME
 
 YAML_EXTENSIONS = {".yaml", ".yml"}
 
 
-def load_from_configs(force_data: bool = False, load_test_data: bool = False) -> None:
+def load_examples_from_configs(
+    force_data: bool = False, load_test_data: bool = False
+) -> None:
+    """
+    Load all the examples inside superset/examples/configs/.
+    """
     contents = load_contents(load_test_data)
     command = ImportExamplesCommand(contents, overwrite=True, force_data=force_data)
     command.run()
@@ -55,3 +62,32 @@ def load_contents(load_test_data: bool = False) -> Dict[str, Any]:
             )
 
     return {str(path.relative_to(root)): content for path, content in contents.items()}
+
+
+def load_configs_from_directory(
+    root: Path, overwrite: bool = True, force_data: bool = False
+) -> None:
+    """
+    Load all the examples from a given directory.
+    """
+    contents: Dict[str, str] = {}
+    queue = [root]
+    while queue:
+        path_name = queue.pop()
+        if path_name.is_dir():
+            queue.extend(path_name.glob("*"))
+        elif path_name.suffix.lower() in YAML_EXTENSIONS:
+            with open(path_name) as fp:
+                contents[str(path_name.relative_to(root))] = fp.read()
+
+    # removing "type" from the metadata allows us to import any exported model
+    # from the unzipped directory directly
+    metadata = yaml.load(contents.get(METADATA_FILE_NAME, "{}"))
+    if "type" in metadata:
+        del metadata["type"]
+    contents[METADATA_FILE_NAME] = yaml.dump(metadata)
+
+    command = ImportExamplesCommand(
+        contents, overwrite=overwrite, force_data=force_data
+    )
+    command.run()
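
A minimal usage sketch of the helper added above. Only load_configs_from_directory and its keyword arguments come from this diff; the export path is assumed, and the call needs the Flask app context that the CLI command gets from @with_appcontext.

    # Hypothetical usage (illustrative only): import every YAML config found
    # under an unzipped export directory, overwriting existing metadata but
    # leaving already-loaded table data untouched.
    from pathlib import Path

    from superset.examples.utils import load_configs_from_directory

    load_configs_from_directory(
        root=Path("/tmp/unzipped_export"),  # assumed location of the extracted configs
        overwrite=True,     # same effect as the CLI's --overwrite flag
        force_data=False,   # pass True to mimic --force and reload table data
    )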