diff --git a/azext_edge/edge/_help.py b/azext_edge/edge/_help.py
index fa2d4098e..881cbf5d8 100644
--- a/azext_edge/edge/_help.py
+++ b/azext_edge/edge/_help.py
@@ -525,7 +525,8 @@ def load_iotops_help():
                       for the existance of the associated custom location and cluster and ensure that both are set
                       up correctly with the microsoft.deviceregistry.assets extension.
 
-                      At least one data point or event must be defined during asset creation.
+                      At least one data point or event must be defined during asset creation. For examples
+                      of file formats, please see aka.ms/aziotops-assets
 
         examples:
         - name: Create an asset using the given custom location.
@@ -586,9 +587,14 @@ def load_iotops_help():
             data_source={data_source} name={name} observability_mode={observability_mode}
             sampling_interval={sampling_interval} queue_size={queue_size} --data data_source={data_source}
 
+        - name: Create an asset using a file containing data-points and another file containing events.
+          text: >
+            az iot ops asset create --name MyAsset -g MyRg --custom-location MyLocation --endpoint exampleEndpoint
+            --data-file /path/to/myasset_datapoints.json --event-file /path/to/myasset_events.csv
+
         - name: Create an asset with the given pre-filled values.
           text: >
-            az iot ops asset create --name MyAsset -g MyRg --custom-location MyLocation --endpoint example.com
+            az iot ops asset create --name MyAsset -g MyRg --custom-location MyLocation --endpoint exampleEndpoint
             --data capability_id=myTagId data_source=NodeID1 name=myTagName1 observability_mode=counter
             sampling_interval=10 queue_size=2 --data data_source=NodeID2
             --data-publish-int 1000 --data-queue-size 1 --data-sample-int 30
@@ -727,6 +733,7 @@ def load_iotops_help():
     ] = """
         type: command
         short-summary: Import data points in an asset.
+        long-summary: For examples of file formats, please see aka.ms/aziotops-assets
         examples:
         - name: Import all data points from a file. These data points will be appended to the asset's current data points. Data-points with duplicate dataSources will be ignored.
           text: >
@@ -820,6 +827,7 @@ def load_iotops_help():
     ] = """
         type: command
         short-summary: Import events in an asset.
+        long-summary: For examples of file formats, please see aka.ms/aziotops-assets
         examples:
        - name: Import all events from a file. These events will be appended to the asset's current events.
          text: >
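Editorial aside, not part of the diff: the new `--data-file`/`--event-file` options (and the aka.ms/aziotops-assets pointer above) refer to files that deserialize into lists of sub-point records. A minimal sketch of that shape, written as Python literals; the key names mirror the `mocked_deserialize_file_content` fixture added in `test_create_unit.py`, while every value here is made up for illustration.

```python
# Hypothetical sketch of what a JSON --data-file is expected to deserialize into.
# Keys mirror the fixture in test_create_unit.py; the values are examples only.
example_data_points = [
    {
        "dataSource": "NodeID1",  # field used to identify/de-duplicate data points
        "name": "myTagName1",
        "capabilityId": "myTagId",
        "dataPointConfiguration": "{\"samplingInterval\": 100}",  # JSON-encoded string
    },
    {"dataSource": "NodeID2"},
]

# An --event-file follows the same pattern, keyed on "eventNotifier" instead.
example_events = [
    {"eventNotifier": "NodeID3", "name": "myEventName"},
]
```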
diff --git a/azext_edge/edge/commands_assets.py b/azext_edge/edge/commands_assets.py
index 7055353f4..450eec0dc 100644
--- a/azext_edge/edge/commands_assets.py
+++ b/azext_edge/edge/commands_assets.py
@@ -29,11 +29,13 @@ def create_asset(
     custom_location_resource_group: Optional[str] = None,
     custom_location_subscription: Optional[str] = None,
     data_points: Optional[List[str]] = None,
+    data_points_file_path: Optional[str] = None,
     description: Optional[str] = None,
     disabled: bool = False,
     display_name: Optional[str] = None,
     documentation_uri: Optional[str] = None,
     events: Optional[List[str]] = None,
+    events_file_path: Optional[List[str]] = None,
     external_asset_id: Optional[str] = None,
     hardware_revision: Optional[str] = None,
     location: Optional[str] = None,
@@ -65,10 +67,12 @@ def create_asset(
         custom_location_resource_group=custom_location_resource_group,
         custom_location_subscription=custom_location_subscription,
         data_points=data_points,
+        data_points_file_path=data_points_file_path,
         description=description,
         disabled=disabled,
         display_name=display_name,
         documentation_uri=documentation_uri,
+        events_file_path=events_file_path,
         events=events,
         external_asset_id=external_asset_id,
         hardware_revision=hardware_revision,
diff --git a/azext_edge/edge/common.py b/azext_edge/edge/common.py
index a6a321abb..26f2ba7a8 100644
--- a/azext_edge/edge/common.py
+++ b/azext_edge/edge/common.py
@@ -198,14 +198,6 @@ def provider(self):
         return mapping[self]
 
 
-class ClusterExtensionsMapping(Enum):
-    """
-    Cluster extension mappings.
-    """
-
-    asset = "adr"
-
-
 class AEPAuthModes(Enum):
     """
     Authentication modes for asset endpoints
diff --git a/azext_edge/edge/params.py b/azext_edge/edge/params.py
index a63500a7e..1f86c154d 100644
--- a/azext_edge/edge/params.py
+++ b/azext_edge/edge/params.py
@@ -694,6 +694,13 @@ def load_iotops_arguments(self, _):
             "--data can be used 1 or more times. Review help examples for full parameter usage",
             arg_group="Additional Info",
         )
+        context.argument(
+            "data_points_file_path",
+            options_list=["--data-file", "--df"],
+            help="File path for the file containing the data points. The following file types are supported: "
+            f"{', '.join(FileType.list())}.",
+            arg_group="Additional Info",
+        )
         context.argument(
             "description",
             options_list=["--description", "-d"],
@@ -731,6 +738,13 @@ def load_iotops_arguments(self, _):
             "--event can be used 1 or more times. Review help examples for full parameter usage",
             arg_group="Additional Info",
         )
+        context.argument(
+            "events_file_path",
+            options_list=["--event-file", "--ef"],
+            help="File path for the file containing the events. The following file types are supported: "
+            f"{', '.join(FileType.list())}.",
+            arg_group="Additional Info",
+        )
         context.argument(
             "external_asset_id",
             options_list=["--external-asset-id", "--eai"],
diff --git a/azext_edge/edge/providers/rpsaas/adr/assets.py b/azext_edge/edge/providers/rpsaas/adr/assets.py
index a45580630..46c220120 100644
--- a/azext_edge/edge/providers/rpsaas/adr/assets.py
+++ b/azext_edge/edge/providers/rpsaas/adr/assets.py
@@ -46,11 +46,13 @@ def create(
         custom_location_resource_group: Optional[str] = None,
         custom_location_subscription: Optional[str] = None,
         data_points: Optional[List[str]] = None,
+        data_points_file_path: Optional[str] = None,
         description: Optional[str] = None,
         disabled: bool = False,
         display_name: Optional[str] = None,
         documentation_uri: Optional[str] = None,
         events: Optional[List[str]] = None,
+        events_file_path: Optional[List[str]] = None,
         external_asset_id: Optional[str] = None,
         hardware_revision: Optional[str] = None,
         location: Optional[str] = None,
@@ -68,7 +70,7 @@ def create(
         ev_queue_size: int = 1,
         tags: Optional[Dict[str, str]] = None,
     ):
-        if not any([data_points, events]):
+        if not any([data_points, events, data_points_file_path, events_file_path]):
             raise RequiredArgumentMissingError(MISSING_DATA_EVENT_ERROR)
         extended_location = self.check_cluster_and_custom_location(
             custom_location_name=custom_location_name,
@@ -88,6 +90,14 @@ def create(
             "dataPoints": _process_asset_sub_points("data_source", data_points),
             "events": _process_asset_sub_points("event_notifier", events),
         }
+        if data_points_file_path:
+            properties["dataPoints"].extend(
+                _process_asset_sub_points_file_path(file_path=data_points_file_path)
+            )
+        if events_file_path:
+            properties["events"].extend(
+                _process_asset_sub_points_file_path(file_path=events_file_path)
+            )
 
         # Other properties
         _update_properties(
@@ -354,7 +364,6 @@ def import_sub_points(
         resource_group_name: str,
         replace: bool = False
     ):
-        from ....util import deserialize_file_content
         asset = self.show(
             resource_name=asset_name,
             resource_group_name=resource_group_name,
@@ -363,8 +372,7 @@ def import_sub_points(
         if sub_point_type not in asset["properties"]:
             asset["properties"][sub_point_type] = []
 
-        sub_points = list(deserialize_file_content(file_path=file_path))
-        _convert_sub_points_from_csv(sub_points)
+        sub_points = _process_asset_sub_points_file_path(file_path=file_path)
        key = "dataSource" if sub_point_type == "dataPoints" else "eventNotifier"
 
         if replace:
@@ -591,7 +599,7 @@ def _convert_sub_points_to_csv(
     return list(csv_conversion_map.values())
 
 
-def _process_asset_sub_points(required_arg: str, sub_points: Optional[List[str]]) -> Dict[str, str]:
+def _process_asset_sub_points(required_arg: str, sub_points: Optional[List[str]]) -> List[Dict[str, str]]:
     """This is for the main create/update asset commands"""
     if not sub_points:
         return []
@@ -603,13 +611,7 @@ def _process_asset_sub_points(
         if not parsed_points.get(required_arg):
             raise RequiredArgumentMissingError(f"{point_type} ({point}) is missing the {required_arg}.")
 
-        if parsed_points.get(invalid_arg) or (
-            required_arg == "event_notifier"
-            and any([
-                "capability_id" in parsed_points,
-                "sampling_interval" in parsed_points
-            ])
-        ):
+        if parsed_points.get(invalid_arg):
             raise InvalidArgumentValueError(f"{point_type} does not support {invalid_arg}.")
 
         processed_point = _build_asset_sub_point(**parsed_points)
@@ -618,6 +620,13 @@ def _process_asset_sub_points(
     return processed_points
 
 
+def _process_asset_sub_points_file_path(file_path: str) -> List[Dict[str, str]]:
+    from ....util import deserialize_file_content
+    sub_points = list(deserialize_file_content(file_path=file_path))
+    _convert_sub_points_from_csv(sub_points)
+    return sub_points
+
+
 def _process_custom_attributes(current_attributes: Dict[str, str], custom_attributes: List[str]):
     custom_attributes = assemble_nargs_to_dict(custom_attributes)
     for key, value in custom_attributes.items():
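Editorial aside, not part of the diff: both `create` (via `--data-file`/`--event-file`) and `import_sub_points` now funnel file input through the shared `_process_asset_sub_points_file_path` helper shown above. A minimal sketch of that flow follows; the file paths are hypothetical and the absolute import path is inferred from the relative `....util` import in assets.py.

```python
# Minimal sketch of the shared file-ingestion flow (illustration only; the real
# code lives in assets.py). The import path is inferred from "....util" and the
# file paths below are hypothetical.
from typing import Dict, List

from azext_edge.edge.util import deserialize_file_content


def process_sub_points_file(file_path: str) -> List[Dict[str, str]]:
    # Read the file (a format from FileType.list(), e.g. json or csv) into a list
    # of dicts; the real helper additionally runs _convert_sub_points_from_csv so
    # CSV rows end up in the same API shape as JSON input.
    return list(deserialize_file_content(file_path=file_path))


# In create(): file-sourced sub-points are appended after the --data/--event args.
properties = {"dataPoints": [], "events": []}
properties["dataPoints"].extend(process_sub_points_file("/path/to/myasset_datapoints.json"))
properties["events"].extend(process_sub_points_file("/path/to/myasset_events.csv"))
```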
diff --git a/azext_edge/edge/providers/rpsaas/adr/base.py b/azext_edge/edge/providers/rpsaas/adr/base.py
index a515671bb..48986fd91 100644
--- a/azext_edge/edge/providers/rpsaas/adr/base.py
+++ b/azext_edge/edge/providers/rpsaas/adr/base.py
@@ -6,7 +6,7 @@
 from knack.log import get_logger
 
 from ..base_provider import RPSaaSBaseProvider
-from ....common import ClusterExtensionsMapping, ResourceProviderMapping
+from ....common import ResourceProviderMapping
 
 logger = get_logger(__name__)
 ADR_API_VERSION = "2023-11-01-preview"
@@ -21,5 +21,4 @@ def __init__(
             api_version=ADR_API_VERSION,
             provider_namespace=ResourceProviderMapping.deviceregistry.value,
             resource_type=resource_type,
-            required_extension=ClusterExtensionsMapping.asset.value
         )
diff --git a/azext_edge/edge/providers/rpsaas/base_provider.py b/azext_edge/edge/providers/rpsaas/base_provider.py
index 2976fafa1..a4c17a5d1 100644
--- a/azext_edge/edge/providers/rpsaas/base_provider.py
+++ b/azext_edge/edge/providers/rpsaas/base_provider.py
@@ -37,7 +37,6 @@ def __init__(
         self,
         cmd,
         api_version: str,
-        required_extension: str,
         provider_namespace: str = "",
         parent_resource_path: str = "",
         resource_type: str = "",
@@ -52,7 +51,6 @@ def __init__(
         self.provider_namespace = provider_namespace
         self.resource_type = resource_type
         self.parent_resource_path = parent_resource_path
-        self.required_extension = required_extension
 
     def delete(
         self,
@@ -261,7 +259,6 @@ def check_cluster_and_custom_location(
             logger.warning(CLUSTER_OFFLINE_MSG.format(cluster["name"]))
 
         possible_locations = []
-        extension_key = f"{self.required_extension}.enabled"
         for location in location_query_result:
             usable = False
             for extension_id in location["properties"]["clusterExtensionIds"]:
@@ -270,10 +267,7 @@ def check_cluster_and_custom_location(
                     resource_id=extension_id,
                     api_version=EXTENSION_API_VERSION,
                 ).as_dict()
-                if all([
-                    extension["properties"]["extensionType"] == IOT_OPS_EXTENSION,
-                    extension["properties"]["configurationSettings"].get(extension_key, "false") == "true"
-                ]):
+                if extension["properties"]["extensionType"] == IOT_OPS_EXTENSION:
                     usable = True
                     break
             if usable:
@@ -282,7 +276,7 @@ def check_cluster_and_custom_location(
         # throw if there are no suitable extensions (in the cluster)
         if len(possible_locations) == 0:
             raise ValidationError(
-                MISSING_EXTENSION_ERROR.format(cluster["name"], IOT_OPS_EXTENSION, self.required_extension)
+                MISSING_EXTENSION_ERROR.format(cluster["name"], IOT_OPS_EXTENSION)
             )
         # throw if multiple custom locations (cluster name given, multiple locations possible)
         if len(possible_locations) > 1:
diff --git a/azext_edge/edge/providers/rpsaas/user_strings.py b/azext_edge/edge/providers/rpsaas/user_strings.py
index ddf9c1d13..e906c258f 100644
--- a/azext_edge/edge/providers/rpsaas/user_strings.py
+++ b/azext_edge/edge/providers/rpsaas/user_strings.py
@@ -11,7 +11,7 @@
     "The command may fail."
 CLUSTER_OFFLINE_MSG = "Cluster {0} is not connected. The cluster may not update correctly."
 MISSING_CLUSTER_CUSTOM_LOCATION_ERROR = "Need to provide either cluster name or custom location"
-MISSING_EXTENSION_ERROR = "Cluster {0} is missing the {1} extension with enabled {2} features."
+MISSING_EXTENSION_ERROR = "Cluster {0} is missing the {1} extension."
 MULTIPLE_CUSTOM_LOCATIONS_ERROR = "The following custom locations were found for cluster {0}: \n{1}. "\
     "Please specify which custom location to use."
 MULTIPLE_POSSIBLE_ITEMS_ERROR = "Found {0} {1}s with the name {2}. Please provide the resource group "\
diff --git a/azext_edge/tests/edge/rpsaas/adr/asset/test_asset_lifecycle_int.py b/azext_edge/tests/edge/rpsaas/adr/asset/test_asset_lifecycle_int.py
index d90f8a7ae..2f9f7a67c 100644
--- a/azext_edge/tests/edge/rpsaas/adr/asset/test_asset_lifecycle_int.py
+++ b/azext_edge/tests/edge/rpsaas/adr/asset/test_asset_lifecycle_int.py
@@ -189,11 +189,11 @@ def test_asset_sub_point_lifecycle(require_init, tracked_resources, tracked_file
         assert_sub_point(asset_data_points[i], **expected_data_points[i])
 
     for file_type in FileType.list():
-        file_path = run(
+        data_file_path = run(
             f"az iot ops asset data-point export -a {asset_name} -g {rg} -f {file_type}"
         )["file_path"]
-        tracked_files.append(file_path)
-        assert os.path.exists(file_path)
+        tracked_files.append(data_file_path)
+        assert os.path.exists(data_file_path)
 
     asset_data_points = run(
         f"az iot ops asset data-point remove -a {asset_name} -g {rg} "
@@ -202,7 +202,7 @@ def test_asset_sub_point_lifecycle(require_init, tracked_resources, tracked_file
     assert len(asset_data_points) + 1 == len(expected_data_points)
 
     asset_data_points = run(
-        f"az iot ops asset data-point import -a {asset_name} -g {rg} --input-file {file_path}"
+        f"az iot ops asset data-point import -a {asset_name} -g {rg} --input-file {data_file_path}"
     )
     assert len(asset_data_points) == len(expected_data_points)
     assert expected_data_points[1]['data_source'] in [point["dataSource"] for point in asset_data_points]
@@ -238,11 +238,11 @@ def test_asset_sub_point_lifecycle(require_init, tracked_resources, tracked_file
         assert_sub_point(asset_events[i], **expected_events[i])
 
     for file_type in FileType.list():
-        file_path = run(
+        event_file_path = run(
             f"az iot ops asset event export -a {asset_name} -g {rg} -f {file_type}"
         )["file_path"]
-        tracked_files.append(file_path)
-        assert os.path.exists(file_path)
+        tracked_files.append(event_file_path)
+        assert os.path.exists(event_file_path)
 
     asset_events = run(
         f"az iot ops asset event remove -a {asset_name} -g {rg} "
@@ -251,11 +251,21 @@ def test_asset_sub_point_lifecycle(require_init, tracked_resources, tracked_file
     assert len(asset_events) + 1 == len(expected_events)
 
     asset_events = run(
-        f"az iot ops asset event import -a {asset_name} -g {rg} --input-file {file_path}"
+        f"az iot ops asset event import -a {asset_name} -g {rg} --input-file {event_file_path}"
     )
     assert len(asset_events) == len(expected_events)
     assert expected_events[1]['event_notifier'] in [point["eventNotifier"] for point in asset_events]
 
+    second_asset = run(
+        f"az iot ops asset create -n {asset_name} -g {rg} -c {cluster_name} --cg {rg} "
+        f"--endpoint {endpoint_name} --data-file {data_file_path} --event-file {event_file_path}"
+    )
+    tracked_resources.append(second_asset["id"])
+    assert len(second_asset["properties"]["dataPoints"]) == len(expected_data_points)
+    assert_sub_point(second_asset["properties"]["dataPoints"][0], **expected_data_points[0])
+    assert len(second_asset["properties"]["events"]) == len(expected_events)
+    assert_sub_point(second_asset["properties"]["events"][0], **expected_events[0])
+
 
 def assert_asset_props(result, **expected):
     assert result["name"] == expected["name"]
diff --git a/azext_edge/tests/edge/rpsaas/adr/asset/test_create_unit.py b/azext_edge/tests/edge/rpsaas/adr/asset/test_create_unit.py
index 2f408e8e4..9f6c1bef1 100644
--- a/azext_edge/tests/edge/rpsaas/adr/asset/test_create_unit.py
+++ b/azext_edge/tests/edge/rpsaas/adr/asset/test_create_unit.py
@@ -14,6 +14,7 @@
 from azext_edge.edge.common import ResourceProviderMapping, ResourceTypeMapping
 from azext_edge.edge.providers.rpsaas.adr.base import ADR_API_VERSION
 
+from .conftest import FULL_ASSET
 from .....generators import generate_random_string
 
 
@@ -22,7 +23,7 @@
     "resources.begin_create_or_update": {"result": generate_random_string()}
 }], ids=["create"], indirect=True)
 @pytest.mark.parametrize("asset_helpers_fixture", [{
-    "process_asset_sub_points": generate_random_string(),
+    "process_asset_sub_points": [{generate_random_string(): generate_random_string()}],
     "update_properties": generate_random_string(),
 }], ids=["create helpers"], indirect=True)
 @pytest.mark.parametrize("req", [
@@ -41,9 +42,11 @@
         "data_points": generate_random_string(),
         "description": generate_random_string(),
         "display_name": generate_random_string(),
+        "data_points_file_path": generate_random_string(),
         "disabled": True,
         "documentation_uri": generate_random_string(),
         "events": generate_random_string(),
+        "events_file_path": generate_random_string(),
         "external_asset_id": generate_random_string(),
         "hardware_revision": generate_random_string(),
         "location": generate_random_string(),
@@ -72,7 +75,23 @@
         "ev_queue_size": 888,
     },
 ])
-def test_create_asset(mocker, mocked_cmd, mocked_resource_management_client, asset_helpers_fixture, req):
+@pytest.mark.parametrize("mocked_deserialize_file_content", [[
+    FULL_ASSET["properties"]["events"][0],
+    {
+        "capabilityId": generate_random_string(),
+        "dataPointConfiguration": "{\"samplingInterval\": 100}",
+        "dataSource": FULL_ASSET["properties"]["dataPoints"][1]["dataSource"],
+        "name": generate_random_string()
+    }
+]], ids=["subPoints"], indirect=True)
+def test_create_asset(
+    mocker,
+    mocked_cmd,
+    mocked_deserialize_file_content,
+    mocked_resource_management_client,
+    asset_helpers_fixture,
+    req
+):
     patched_sp = asset_helpers_fixture["process_asset_sub_points"]
     patched_up = asset_helpers_fixture["update_properties"]
     patched_cap = mocker.patch(
@@ -150,10 +169,19 @@ def test_create_asset(mocker, mocked_cmd, mocked_resource_management_client, ass
     # Data points + events
     assert patched_sp.call_args_list[0].args[0] == "data_source"
     assert patched_sp.call_args_list[0].args[1] == req.get("data_points")
-    assert request_props["dataPoints"] == patched_sp.return_value
+    expected_data_points = patched_sp.return_value
+    if req.get("data_points_file_path"):
+        mocked_deserialize_file_content.assert_any_call(file_path=req["data_points_file_path"])
+        expected_data_points.extend(mocked_deserialize_file_content.return_value)
+    assert request_props["dataPoints"] == expected_data_points
+
     assert patched_sp.call_args_list[1].args[0] == "event_notifier"
     assert patched_sp.call_args_list[1].args[1] == req.get("events")
-    assert request_props["events"] == patched_sp.return_value
+    expected_events = patched_sp.return_value
+    if req.get("events_file_path"):
+        mocked_deserialize_file_content.assert_any_call(file_path=req["events_file_path"])
+        expected_events.extend(mocked_deserialize_file_content.return_value)
+    assert request_props["events"] == expected_events
 
 
 def test_create_asset_error(mocked_cmd):
diff --git a/azext_edge/tests/edge/rpsaas/base/provider/test_check_cluster_and_custom_location_unit.py b/azext_edge/tests/edge/rpsaas/base/provider/test_check_cluster_and_custom_location_unit.py
index 7eca4fe98..6d64a849c 100644
--- a/azext_edge/tests/edge/rpsaas/base/provider/test_check_cluster_and_custom_location_unit.py
+++ b/azext_edge/tests/edge/rpsaas/base/provider/test_check_cluster_and_custom_location_unit.py
@@ -68,8 +68,7 @@ def test_check_cluster_and_custom_location(
 
     provider = RPSaaSBaseProvider(
         mocked_cmd,
-        generate_random_string(),
-        required_extension=extension_key
+        generate_random_string()
     )
 
     custom_location_name = req.get("custom_location_name")
@@ -225,29 +224,24 @@
         }]
     },
 ], ids=["build_query"], indirect=True)
-@pytest.mark.parametrize("extension_enabled", [True, False], ids=["invalid_type", "not_enabled"])
 def test_check_cluster_and_custom_location_no_extension_error(
     mocker,
     mocked_cmd,
     mocked_build_query,
     mocked_resource_management_client,
-    extension_enabled,
 ):
     from azext_edge.edge.providers.rpsaas.base_provider import RPSaaSBaseProvider
-    extension_key = generate_random_string()
     ext_result = mocker.Mock()
     ext_result.as_dict.return_value = {
         "properties": {
-            "configurationSettings": {f"{extension_key}.enabled": str(extension_enabled).lower()},
-            "extensionType": generate_random_string() if extension_enabled else "microsoft.iotoperations"
+            "extensionType": generate_random_string()
         }
     }
     mocked_resource_management_client.resources.get_by_id.return_value = ext_result
 
     provider = RPSaaSBaseProvider(
         mocked_cmd,
-        generate_random_string(),
-        required_extension=extension_key
+        generate_random_string()
     )
 
     from azext_edge.edge.providers.rpsaas.adr.base import ADRBaseProvider
diff --git a/azext_edge/tests/edge/rpsaas/base/provider/test_delete_unit.py b/azext_edge/tests/edge/rpsaas/base/provider/test_delete_unit.py
index 69545f4d0..dfd6f70ef 100644
--- a/azext_edge/tests/edge/rpsaas/base/provider/test_delete_unit.py
+++ b/azext_edge/tests/edge/rpsaas/base/provider/test_delete_unit.py
@@ -34,8 +34,7 @@ def test_delete(
         api_version=api_version,
         provider_namespace=provider_namespace,
         resource_type=resource_type,
-        parent_resource_path=parent_resource_path,
-        required_extension=generate_random_string()
+        parent_resource_path=parent_resource_path
     )
 
     result = provider.delete(resource_name, resource_group_name, check_cluster_connectivity)
diff --git a/azext_edge/tests/edge/rpsaas/base/provider/test_list_unit.py b/azext_edge/tests/edge/rpsaas/base/provider/test_list_unit.py
index fab8ede48..cb958daee 100644
--- a/azext_edge/tests/edge/rpsaas/base/provider/test_list_unit.py
+++ b/azext_edge/tests/edge/rpsaas/base/provider/test_list_unit.py
@@ -36,8 +36,7 @@ def test_list_assets(
         api_version=api_version,
         provider_namespace=provider_namespace,
         resource_type=resource_type,
-        parent_resource_path=parent_resource_path,
-        required_extension=generate_random_string()
+        parent_resource_path=parent_resource_path
     )
 
     result = provider.list(resource_group)
diff --git a/azext_edge/tests/edge/rpsaas/base/provider/test_show_unit.py b/azext_edge/tests/edge/rpsaas/base/provider/test_show_unit.py
index 78a81ffe5..c95e4b314 100644
--- a/azext_edge/tests/edge/rpsaas/base/provider/test_show_unit.py
+++ b/azext_edge/tests/edge/rpsaas/base/provider/test_show_unit.py
@@ -33,8 +33,7 @@ def test_show(
         api_version=api_version,
         provider_namespace=provider_namespace,
         resource_type=resource_type,
-        parent_resource_path=parent_resource_path,
-        required_extension=generate_random_string()
+        parent_resource_path=parent_resource_path
     )
 
     result = provider.show(resource_name, resource_group_name, check_cluster_connectivity)