From 25f7e4f136133e6b3cc93f3707ab0041ece78614 Mon Sep 17 00:00:00 2001 From: Victoria Litvinova Date: Fri, 27 Sep 2024 11:25:54 -0700 Subject: [PATCH 1/2] help, points, events, life --- azext_edge/edge/_help.py | 30 ++-- azext_edge/edge/commands_assets.py | 81 +--------- azext_edge/edge/params.py | 31 +++- .../edge/providers/rpsaas/adr/assets.py | 26 +++- .../edge/providers/rpsaas/adr/user_strings.py | 4 + azext_edge/tests/edge/rpsaas/adr/conftest.py | 2 + .../tests/edge/rpsaas/adr/test_assets_unit.py | 146 ++++++++++++++---- 7 files changed, 184 insertions(+), 136 deletions(-) diff --git a/azext_edge/edge/_help.py b/azext_edge/edge/_help.py index 00ac9eb1a..bc04f32c4 100644 --- a/azext_edge/edge/_help.py +++ b/azext_edge/edge/_help.py @@ -819,6 +819,7 @@ def load_iotops_help(): ] = """ type: group short-summary: Manage datasets in an asset. + long-summary: A dataset will be created once a point is created. See `az iot ops asset dataset point add` for more details. """ helps[ @@ -842,7 +843,7 @@ def load_iotops_help(): examples: - name: Show the details of a dataset in an asset. text: > - az iot ops asset dataset show -g myresourcegroup --asset myasset -n dataset1 + az iot ops asset dataset show -g myresourcegroup --asset myasset -n default """ helps[ @@ -857,16 +858,17 @@ def load_iotops_help(): ] = """ type: command short-summary: Add a data point to an asset dataset. + long-summary: If no datasets exist yet, this will create a new dataset. Currently, only one dataset is supported with the name "default". examples: - name: Add a data point to an asset. text: > - az iot ops asset dataset point add --asset myasset -g myresourcegroup --dataset dataset1 --data-source mydatasource --name data1 + az iot ops asset dataset point add --asset myasset -g myresourcegroup --dataset default --data-source mydatasource --name data1 - name: Add a data point to an asset with data point name, observability mode, custom queue size, and custom sampling interval. 
text: > - az iot ops asset dataset point add --asset myasset -g myresourcegroup --dataset dataset1 --data-source mydatasource --name data1 + az iot ops asset dataset point add --asset myasset -g myresourcegroup --dataset default --data-source mydatasource --name data1 --observability-mode log --queue-size 5 --sampling-interval 200 """ @@ -879,13 +881,13 @@ def load_iotops_help(): examples: - name: Export all data-points in an asset in JSON format. text: > - az iot ops asset dataset point export --asset myasset -g myresourcegroup --dataset dataset1 + az iot ops asset dataset point export --asset myasset -g myresourcegroup --dataset default - name: Export all data-points in an asset in CSV format in a specific output directory that can be uploaded via the Digital Operations Experience. text: > - az iot ops asset dataset point export --asset myasset -g myresourcegroup --dataset dataset1 --format csv --output-dir myAssetsFiles + az iot ops asset dataset point export --asset myasset -g myresourcegroup --dataset default --format csv --output-dir myAssetsFiles - name: Export all data-points in an asset in YAML format. Replace the file if one is present already. text: > - az iot ops asset dataset point export --asset myasset -g myresourcegroup --dataset dataset1 --format yaml --replace + az iot ops asset dataset point export --asset myasset -g myresourcegroup --dataset default --format yaml --replace """ helps[ @@ -895,12 +897,12 @@ def load_iotops_help(): short-summary: Import data-points in an asset dataset. long-summary: For examples of file formats, please see aka.ms/aziotops-assets examples: - - name: Import all data-points from a file. These data-points will be appended to the asset dataset's current data-points. Data-points with duplicate dataSources will be ignored. + - name: Import all data-points from a file. These data-points will be appended to the asset dataset's current data-points. Data-points with duplicate names will be ignored. 
text: > - az iot ops asset dataset point import --asset myasset -g myresourcegroup --dataset dataset1 --input-file myasset_dataset1_dataPoints.csv - - name: Import all data-points from a file. These data-points will be appended to the asset dataset's current data-points. Data-points with duplicate dataSources will be replaced. + az iot ops asset dataset point import --asset myasset -g myresourcegroup --dataset default --input-file myasset_default_dataPoints.csv + - name: Import all data-points from a file. These data-points will be appended to the asset dataset's current data-points. Data-points with duplicate names will replace the current asset data-points. text: > - az iot ops asset dataset point import --asset myasset -g myresourcegroup --dataset dataset1 --input-file myasset_dataset1_dataPoints.json --replace + az iot ops asset dataset point import --asset myasset -g myresourcegroup --dataset default --input-file myasset_default_dataPoints.json --replace """ helps[ @@ -911,7 +913,7 @@ def load_iotops_help(): examples: - name: List all points in an asset dataset. text: > - az iot ops asset dataset point list --asset myasset -g myresourcegroup --dataset dataset1 + az iot ops asset dataset point list --asset myasset -g myresourcegroup --dataset default """ helps[ @@ -923,7 +925,7 @@ def load_iotops_help(): examples: - name: Remove a data point from an asset via the data point name. text: > - az iot ops asset dataset point remove --asset myasset -g myresourcegroup --dataset dataset1 --name data1 + az iot ops asset dataset point remove --asset myasset -g myresourcegroup --dataset default --name data1 """ helps[ @@ -976,10 +978,10 @@ def load_iotops_help(): short-summary: Import events in an asset. long-summary: For examples of file formats, please see aka.ms/aziotops-assets examples: - - name: Import all events from a file. These events will be appended to the asset's current events. + - name: Import all events from a file. 
These events will be appended to the asset's current events. Events with duplicate names will be ignored.
       text: >
         az iot ops asset event import --asset myasset -g myresourcegroup --input-file myasset_events.yaml
-      - name: Import all events from a file. These events will replace the asset's current events.
+      - name: Import all events from a file. These events will be appended to the asset's current events. Events with duplicate names will replace the current asset events.
       text: >
         az iot ops asset event import --asset myasset -g myresourcegroup --input-file myasset_events.csv --replace
     """
diff --git a/azext_edge/edge/commands_assets.py b/azext_edge/edge/commands_assets.py
index 17f1178c5..c77d02655 100644
--- a/azext_edge/edge/commands_assets.py
+++ b/azext_edge/edge/commands_assets.py
@@ -212,66 +212,6 @@ def update_asset(
 
 
 # Dataset commands
-# TODO: multi dataset support
-# def add_asset_dataset(
-#     cmd,
-#     asset_name: str,
-#     dataset_name: str,
-#     resource_group_name: str,
-#     data_points: Optional[List[str]] = None,
-#     data_point_file_path: Optional[str] = None,
-#     queue_size: Optional[int] = None,
-#     sampling_interval: Optional[int] = None,
-#     publishing_interval: Optional[int] = None,
-#     topic_path: Optional[str] = None,
-#     topic_retain: Optional[str] = None
-# ):
-#     return Assets(cmd).add_dataset(
-#         asset_name=asset_name,
-#         dataset_name=dataset_name,
-#         resource_group_name=resource_group_name,
-#         data_points=data_points,
-#         data_point_file_path=data_point_file_path,
-#         queue_size=queue_size,
-#         sampling_interval=sampling_interval,
-#         publishing_interval=publishing_interval,
-#         topic_path=topic_path,
-#         topic_retain=topic_retain,
-#     )
-
-
-# def export_asset_datasets(
-#     cmd,
-#     asset_name: str,
-#     resource_group_name: str,
-#     extension: str = "json",
-#     output_dir: str = ".",
-#     replace: bool = False
-# ):
-#     return Assets(cmd).export_datasets(
-#         asset_name=asset_name,
-#         resource_group_name=resource_group_name,
-#         extension=extension,
-#         output_dir=output_dir,
-# replace=replace -# ) - - -# def import_asset_datasets( -# cmd, -# asset_name: str, -# file_path: str, -# resource_group_name: str, -# replace: bool = False -# ): -# return Assets(cmd).import_datasets( -# asset_name=asset_name, -# file_path=file_path, -# resource_group_name=resource_group_name, -# replace=replace -# ) - - def list_asset_datasets( cmd, asset_name: str, @@ -296,19 +236,6 @@ def show_asset_dataset( ) -# def remove_asset_dataset( -# cmd, -# asset_name: str, -# dataset_name: str, -# resource_group_name: str -# ): -# return Assets(cmd).remove_dataset( -# asset_name=asset_name, -# dataset_name=dataset_name, -# resource_group_name=resource_group_name -# ) - - # Data Point sub commands def add_asset_data_point( cmd, @@ -320,6 +247,7 @@ def add_asset_data_point( observability_mode: Optional[str] = None, queue_size: Optional[int] = None, sampling_interval: Optional[int] = None, + replace: Optional[bool] = None ): return Assets(cmd).add_dataset_data_point( asset_name=asset_name, @@ -330,6 +258,7 @@ def add_asset_data_point( queue_size=queue_size, sampling_interval=sampling_interval, resource_group_name=resource_group_name, + replace=replace ) @@ -407,8 +336,7 @@ def add_asset_event( observability_mode: Optional[str] = None, queue_size: Optional[int] = None, sampling_interval: Optional[int] = None, # Note: not in DOE - # topic_path: Optional[str] = None, # TODO: expose once supported - # topic_retain: Optional[str] = None + replace: Optional[bool] = None ): return Assets(cmd).add_event( asset_name=asset_name, @@ -418,8 +346,7 @@ def add_asset_event( queue_size=queue_size, sampling_interval=sampling_interval, resource_group_name=resource_group_name, - # topic_path=topic_path, - # topic_retain=topic_retain + replace=replace, ) diff --git a/azext_edge/edge/params.py b/azext_edge/edge/params.py index 8accf39de..e7033ab13 100644 --- a/azext_edge/edge/params.py +++ b/azext_edge/edge/params.py @@ -609,7 +609,7 @@ def load_iotops_arguments(self, _): 
options_list=["--data"], nargs="+", action="append", - help="Space-separated key=value pairs corresponding to properties of the data point to create. " + help="Space-separated key=value pairs corresponding to properties of the data-point to create. " "The following key values are supported: `data_source` (required), `name` (required), " "`observability_mode` (None, Gauge, Counter, Histogram, or Log), `sampling_interval` (int), " "`queue_size` (int). " @@ -619,7 +619,7 @@ def load_iotops_arguments(self, _): context.argument( "data_points_file_path", options_list=["--data-file", "--df"], - help="File path for the file containing the data points. The following file types are supported: " + help="File path for the file containing the data-points. The following file types are supported: " f"{', '.join(FileType.list())}.", arg_group="Data-point", ) @@ -843,7 +843,7 @@ def load_iotops_arguments(self, _): context.argument( "capability_id", options_list=["--capability-id", "--ci"], - help="Capability Id. If not provided, data point name will be used.", + help="Capability Id. If not provided, data-point name will be used.", ) context.argument( "dataset_name", @@ -866,6 +866,14 @@ def load_iotops_arguments(self, _): help="Observability mode. Must be none, gauge, counter, histogram, or log.", ) + with self.argument_context("iot ops asset dataset point add") as context: + context.argument( + "replace", + options_list=["--replace"], + help="Replace the data-point if another data-point with the same name is present already.", + arg_type=get_three_state_flag(), + ) + with self.argument_context("iot ops asset dataset point export") as context: context.argument( "replace", @@ -878,14 +886,14 @@ def load_iotops_arguments(self, _): context.argument( "replace", options_list=["--replace"], - help="Replace all asset data points with those from the file. If false, the file data points " - "will be appended.", + help="Replace duplicate asset data-points with those from the file. 
If false, the file data-points " + "will be ignored. Duplicate asset data-points will be determined by name.", arg_type=get_three_state_flag(), ) context.argument( "file_path", options_list=["--input-file", "--if"], - help="File path for the file containing the data points. The following file types are supported: " + help="File path for the file containing the data-points. The following file types are supported: " f"{', '.join(FileType.list())}.", ) @@ -916,6 +924,14 @@ def load_iotops_arguments(self, _): help="Observability mode. Must be none or log.", ) + with self.argument_context("iot ops asset event add") as context: + context.argument( + "replace", + options_list=["--replace"], + help="Replace the event if another event with the same name is already present.", + arg_type=get_three_state_flag(), + ) + with self.argument_context("iot ops asset event export") as context: context.argument( "replace", @@ -928,7 +944,8 @@ def load_iotops_arguments(self, _): context.argument( "replace", options_list=["--replace"], - help="Replace all asset events with those from the file. If false, the file events will be appended.", + help="Replace duplicate asset events with those from the file. If false, the file events " + "will be ignored. 
Duplicate asset events will be determined by name.", arg_type=get_three_state_flag(), ) context.argument( diff --git a/azext_edge/edge/providers/rpsaas/adr/assets.py b/azext_edge/edge/providers/rpsaas/adr/assets.py index adebbda71..d2d8c130b 100644 --- a/azext_edge/edge/providers/rpsaas/adr/assets.py +++ b/azext_edge/edge/providers/rpsaas/adr/assets.py @@ -14,7 +14,7 @@ ) # from azure.core.exceptions import ResourceNotFoundError -from .user_strings import INVALID_OBSERVABILITY_MODE_ERROR +from .user_strings import DUPLICATE_EVENT_ERROR, DUPLICATE_POINT_ERROR, INVALID_OBSERVABILITY_MODE_ERROR from ....util import assemble_nargs_to_dict from ....common import FileType from ....util.az_client import get_registry_mgmt_client, wait_for_terminal_state @@ -368,6 +368,7 @@ def add_dataset_data_point( observability_mode: Optional[str] = None, queue_size: Optional[int] = None, sampling_interval: Optional[int] = None, + replace: bool = False ): asset = self.show( asset_name=asset_name, @@ -375,9 +376,12 @@ def add_dataset_data_point( check_cluster=True ) dataset = _get_dataset(asset, dataset_name, create_if_none=True) - if not dataset.get("dataPoints"): - dataset["dataPoints"] = [] - + dataset["dataPoints"] = dataset.get("dataPoints", []) + point_names = [point["name"] for point in dataset["dataPoints"]] + if not replace and data_point_name in point_names: + raise InvalidArgumentValueError( + DUPLICATE_POINT_ERROR.format(data_point_name) + ) sub_point = _build_asset_sub_point( data_source=data_source, name=data_point_name, @@ -455,7 +459,7 @@ def import_dataset_data_points( dataset["dataPoints"] = _process_asset_sub_points_file_path( file_path=file_path, original_items=dataset.get("dataPoints", []), - point_key="dataSource", + point_key="name", replace=replace ) @@ -524,7 +528,8 @@ def add_event( queue_size: Optional[int] = None, sampling_interval: Optional[int] = None, topic_path: Optional[str] = None, - topic_retain: Optional[str] = None + topic_retain: Optional[str] = 
None, + replace: bool = False ): asset = self.show( asset_name=asset_name, @@ -532,6 +537,12 @@ def add_event( check_cluster=True ) + asset["properties"]["events"] = asset["properties"].get("events", []) + event_names = [event["name"] for event in asset["properties"]["events"]] + if not replace and event_name in event_names: + raise InvalidArgumentValueError( + DUPLICATE_EVENT_ERROR.format(event_name) + ) sub_point = _build_asset_sub_point( event_notifier=event_notifier, name=event_name, @@ -544,7 +555,6 @@ def add_event( "path": topic_path, "retain": topic_retain or "Never" } - asset["properties"]["events"] = asset["properties"].get("events", []) asset["properties"]["events"].append(sub_point) # note that update does not return the properties @@ -607,7 +617,7 @@ def import_events( asset["properties"]["events"] = _process_asset_sub_points_file_path( file_path=file_path, original_items=asset["properties"].get("events", []), - point_key="eventNotifier", + point_key="name", replace=replace ) diff --git a/azext_edge/edge/providers/rpsaas/adr/user_strings.py b/azext_edge/edge/providers/rpsaas/adr/user_strings.py index 93f07fea9..c6db09d48 100644 --- a/azext_edge/edge/providers/rpsaas/adr/user_strings.py +++ b/azext_edge/edge/providers/rpsaas/adr/user_strings.py @@ -5,6 +5,10 @@ # ---------------------------------------------------------------------------------------------- # Asset Strings +DUPLICATE_EVENT_ERROR = "An event with the name {0} is already present. Please use a different name for "\ + "your event or --replace." +DUPLICATE_POINT_ERROR = "A data-point with the name {0} is already present. Please use a different name for "\ + "your data-point or --replace." ENDPOINT_NOT_FOUND_WARNING = "Endpoint {0} not found. The asset may fail provisioning." INVALID_OBSERVABILITY_MODE_ERROR = "{0} has an invalid observability mode [{1}]." MISSING_DATA_EVENT_ERROR = "At least one data point or event is required to create the asset." 
diff --git a/azext_edge/tests/edge/rpsaas/adr/conftest.py b/azext_edge/tests/edge/rpsaas/adr/conftest.py index c815369bb..b1ab4aef6 100644 --- a/azext_edge/tests/edge/rpsaas/adr/conftest.py +++ b/azext_edge/tests/edge/rpsaas/adr/conftest.py @@ -222,6 +222,7 @@ def get_profile_record( "observabilityMode": generate_random_string() }, { + "name": generate_random_string(), "dataPointConfiguration": "{}", "dataSource": generate_random_string(), }, @@ -252,6 +253,7 @@ def get_profile_record( "observabilityMode": generate_random_string() }, { + "name": generate_random_string(), "eventConfiguration": "{}", "eventNotifier": generate_random_string(), }, diff --git a/azext_edge/tests/edge/rpsaas/adr/test_assets_unit.py b/azext_edge/tests/edge/rpsaas/adr/test_assets_unit.py index c45e8932f..cd23df9db 100644 --- a/azext_edge/tests/edge/rpsaas/adr/test_assets_unit.py +++ b/azext_edge/tests/edge/rpsaas/adr/test_assets_unit.py @@ -9,6 +9,7 @@ import pytest import responses +from azure.cli.core.azclierror import InvalidArgumentValueError from azext_edge.edge.commands_assets import ( create_asset, delete_asset, @@ -383,6 +384,7 @@ def test_dataset_show( @pytest.mark.parametrize("observability_mode", [None, "log"]) @pytest.mark.parametrize("queue_size", [True, 2]) @pytest.mark.parametrize("sampling_interval", [True, 1000]) +@pytest.mark.parametrize("replace", [False, True]) def test_data_point_add( mocked_cmd, mocked_responses: responses, @@ -390,7 +392,8 @@ def test_data_point_add( dataset_present, observability_mode, queue_size, - sampling_interval + sampling_interval, + replace ): dataset_name = "default" asset_name = generate_random_string() @@ -403,8 +406,14 @@ def test_data_point_add( if dataset_present: dataset = { "name": dataset_name, - "dataPoints": [{generate_random_string(): generate_random_string()}] + "dataPoints": [ + {"name": generate_random_string(), generate_random_string(): generate_random_string()} + ] } + if replace: + dataset["dataPoints"].append({ + "name": 
data_point_name, generate_random_string(): generate_random_string() + }) mock_asset_record["properties"]["datasets"] = [dataset] mocked_responses.add( method=responses.GET, @@ -433,7 +442,8 @@ def test_data_point_add( data_source=data_source, observability_mode=observability_mode, queue_size=queue_size, - sampling_interval=sampling_interval + sampling_interval=sampling_interval, + replace=replace ) assert result == result_datapoints datasets = json.loads(mocked_responses.calls[-1].request.body)["properties"]["datasets"] @@ -447,6 +457,44 @@ def test_data_point_add( assert custom_config.get("samplingInterval") == sampling_interval +def test_data_point_add_error( + mocked_cmd, + mocked_responses: responses, + mocked_check_cluster_connectivity, +): + dataset_name = "default" + asset_name = generate_random_string() + resource_group_name = generate_random_string() + data_point_name = generate_random_string() + mock_asset_record = get_asset_record( + asset_name=asset_name, asset_resource_group=resource_group_name + ) + dataset = { + "name": dataset_name, + "dataPoints": [ + {"name": data_point_name, generate_random_string(): generate_random_string()}, + {"name": generate_random_string(), generate_random_string():generate_random_string()} + ] + } + mock_asset_record["properties"]["datasets"] = [dataset] + mocked_responses.add( + method=responses.GET, + url=get_asset_mgmt_uri(asset_name=asset_name, asset_resource_group=resource_group_name), + json=mock_asset_record, + status=200, + content_type="application/json", + ) + with pytest.raises(InvalidArgumentValueError): + add_asset_data_point( + cmd=mocked_cmd, + dataset_name=dataset_name, + asset_name=asset_name, + resource_group_name=resource_group_name, + data_point_name=data_point_name, + data_source=generate_random_string(), + ) + + @pytest.mark.parametrize("data_points_present", [True, False]) @pytest.mark.parametrize("extension", FileType.list()) @pytest.mark.parametrize("output_dir", [None, generate_random_string()]) 
@@ -542,7 +590,7 @@ def test_data_point_import( dataset_name = "default" asset_name = generate_random_string() resource_group_name = generate_random_string() - dup_data_source = generate_random_string() + dup_name = generate_random_string() file_path = generate_random_string() mock_asset_record = get_asset_record( asset_name=asset_name, asset_resource_group=resource_group_name @@ -552,8 +600,8 @@ def test_data_point_import( "dataPoints": [ { "dataPointConfiguration": "{\"samplingInterval\": 300, \"queueSize\": 30}", - "dataSource": dup_data_source, - "name": generate_random_string(), + "dataSource": generate_random_string(), + "name": dup_name, "observabilityMode": generate_random_string() }, { @@ -569,8 +617,8 @@ def test_data_point_import( "dataPoints": [ { "dataPointConfiguration": "{\"samplingInterval\": 100, \"queueSize\": 50}", - "dataSource": dup_data_source, - "name": generate_random_string(), + "dataSource": generate_random_string(), + "name": dup_name, "observabilityMode": generate_random_string() }, { @@ -613,19 +661,19 @@ def test_data_point_import( mocked_deserialize_file_content.assert_called_once_with(file_path=file_path) datasets = json.loads(mocked_responses.calls[-1].request.body)["properties"]["datasets"] assert datasets - point_map = {point["dataSource"]: point for point in datasets[0]["dataPoints"]} - assert file_dataset["dataPoints"][1]["dataSource"] in point_map - assert dup_data_source in point_map + point_map = {point["name"]: point for point in datasets[0]["dataPoints"]} + assert file_dataset["dataPoints"][1]["name"] in point_map + assert dup_name in point_map # check the duplicate point if replace: point = file_dataset["dataPoints"][0] - assert file_dataset["dataPoints"][1]["dataSource"] in point_map + assert file_dataset["dataPoints"][1]["name"] in point_map else: point = cloud_dataset["dataPoints"][0] - assert cloud_dataset["dataPoints"][1]["dataSource"] in point_map - assert point_map[dup_data_source]["dataPointConfiguration"] == 
point["dataPointConfiguration"] - assert point_map[dup_data_source]["name"] == point["name"] - assert point_map[dup_data_source]["observabilityMode"] == point["observabilityMode"] + assert cloud_dataset["dataPoints"][1]["name"] in point_map + assert point_map[dup_name]["dataPointConfiguration"] == point["dataPointConfiguration"] + assert point_map[dup_name]["dataSource"] == point["dataSource"] + assert point_map[dup_name]["observabilityMode"] == point["observabilityMode"] @pytest.mark.parametrize("data_points_present", [True, False]) @@ -726,13 +774,15 @@ def test_data_point_remove( @pytest.mark.parametrize("observability_mode", [None, "log"]) @pytest.mark.parametrize("queue_size", [True, 2]) @pytest.mark.parametrize("sampling_interval", [True, 1000]) +@pytest.mark.parametrize("replace", [False, True]) def test_event_add( mocked_cmd, mocked_responses: responses, mocked_check_cluster_connectivity, observability_mode, queue_size, - sampling_interval + sampling_interval, + replace ): asset_name = generate_random_string() resource_group_name = generate_random_string() @@ -742,6 +792,10 @@ def test_event_add( asset_name=asset_name, asset_resource_group=resource_group_name ) + if replace: + mock_asset_record["events"] = [{ + "name": event_name, generate_random_string(): generate_random_string() + }] mocked_responses.add( method=responses.GET, url=get_asset_mgmt_uri(asset_name=asset_name, asset_resource_group=resource_group_name), @@ -778,6 +832,38 @@ def test_event_add( assert custom_config.get("samplingInterval") == sampling_interval +def test_event_add_error( + mocked_cmd, + mocked_responses: responses, + mocked_check_cluster_connectivity, +): + asset_name = generate_random_string() + resource_group_name = generate_random_string() + event_name = generate_random_string() + mock_asset_record = get_asset_record( + asset_name=asset_name, asset_resource_group=resource_group_name + ) + mock_asset_record["properties"]["events"] = [ + {"name": event_name, 
generate_random_string(): generate_random_string()}, + {"name": generate_random_string(), generate_random_string(): generate_random_string()} + ] + mocked_responses.add( + method=responses.GET, + url=get_asset_mgmt_uri(asset_name=asset_name, asset_resource_group=resource_group_name), + json=mock_asset_record, + status=200, + content_type="application/json", + ) + with pytest.raises(InvalidArgumentValueError): + add_asset_event( + cmd=mocked_cmd, + asset_name=asset_name, + resource_group_name=resource_group_name, + event_name=event_name, + event_notifier=generate_random_string(), + ) + + @pytest.mark.parametrize("events_present", [True, False]) @pytest.mark.parametrize("extension", FileType.list()) @pytest.mark.parametrize("output_dir", [None, generate_random_string()]) @@ -868,7 +954,7 @@ def test_event_import( mocker.patch("azext_edge.edge.providers.rpsaas.adr.assets.logger") asset_name = generate_random_string() resource_group_name = generate_random_string() - dup_event_notifier = generate_random_string() + dup_name = generate_random_string() file_path = generate_random_string() mock_asset_record = get_asset_record( asset_name=asset_name, asset_resource_group=resource_group_name @@ -876,8 +962,8 @@ def test_event_import( file_events = [ { "eventConfiguration": "{\"samplingInterval\": 300, \"queueSize\": 30}", - "eventNotifier": dup_event_notifier, - "name": generate_random_string(), + "eventNotifier": generate_random_string(), + "name": dup_name, "observabilityMode": generate_random_string() }, { @@ -890,8 +976,8 @@ def test_event_import( cloud_events = [ { "eventConfiguration": "{\"samplingInterval\": 100, \"queueSize\": 50}", - "eventNotifier": dup_event_notifier, - "name": generate_random_string(), + "eventNotifier": generate_random_string(), + "name": dup_name, "observabilityMode": generate_random_string() }, { @@ -929,19 +1015,19 @@ def test_event_import( mocked_deserialize_file_content.assert_called_once_with(file_path=file_path) events = 
json.loads(mocked_responses.calls[-1].request.body)["properties"]["events"] assert events - point_map = {point["eventNotifier"]: point for point in events} - assert file_events[1]["eventNotifier"] in point_map - assert dup_event_notifier in point_map + point_map = {point["name"]: point for point in events} + assert file_events[1]["name"] in point_map + assert dup_name in point_map # check the duplicate point if replace: point = file_events[0] - assert file_events[1]["eventNotifier"] in point_map + assert file_events[1]["name"] in point_map else: point = cloud_events[0] - assert cloud_events[1]["eventNotifier"] in point_map - assert point_map[dup_event_notifier]["eventConfiguration"] == point["eventConfiguration"] - assert point_map[dup_event_notifier]["name"] == point["name"] - assert point_map[dup_event_notifier]["observabilityMode"] == point["observabilityMode"] + assert cloud_events[1]["name"] in point_map + assert point_map[dup_name]["eventConfiguration"] == point["eventConfiguration"] + assert point_map[dup_name]["eventNotifier"] == point["eventNotifier"] + assert point_map[dup_name]["observabilityMode"] == point["observabilityMode"] @pytest.mark.parametrize("events_present", [True, False]) From 634e5b6b61edbff278c21e32a81cf64da0ae7925 Mon Sep 17 00:00:00 2001 From: Victoria Litvinova Date: Fri, 27 Sep 2024 14:35:44 -0700 Subject: [PATCH 2/2] pylint --- azext_edge/edge/commands_assets.py | 4 ++-- azext_edge/tests/edge/rpsaas/adr/test_assets_unit.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/azext_edge/edge/commands_assets.py b/azext_edge/edge/commands_assets.py index c77d02655..2eb641b9a 100644 --- a/azext_edge/edge/commands_assets.py +++ b/azext_edge/edge/commands_assets.py @@ -247,7 +247,7 @@ def add_asset_data_point( observability_mode: Optional[str] = None, queue_size: Optional[int] = None, sampling_interval: Optional[int] = None, - replace: Optional[bool] = None + replace: Optional[bool] = None ): return 
Assets(cmd).add_dataset_data_point( asset_name=asset_name, @@ -336,7 +336,7 @@ def add_asset_event( observability_mode: Optional[str] = None, queue_size: Optional[int] = None, sampling_interval: Optional[int] = None, # Note: not in DOE - replace: Optional[bool] = None + replace: Optional[bool] = None ): return Assets(cmd).add_event( asset_name=asset_name, diff --git a/azext_edge/tests/edge/rpsaas/adr/test_assets_unit.py b/azext_edge/tests/edge/rpsaas/adr/test_assets_unit.py index cd23df9db..32fa48eab 100644 --- a/azext_edge/tests/edge/rpsaas/adr/test_assets_unit.py +++ b/azext_edge/tests/edge/rpsaas/adr/test_assets_unit.py @@ -473,7 +473,7 @@ def test_data_point_add_error( "name": dataset_name, "dataPoints": [ {"name": data_point_name, generate_random_string(): generate_random_string()}, - {"name": generate_random_string(), generate_random_string():generate_random_string()} + {"name": generate_random_string(), generate_random_string(): generate_random_string()} ] } mock_asset_record["properties"]["datasets"] = [dataset]