diff --git a/src/azure-cli-core/azure/cli/core/profiles/_shared.py b/src/azure-cli-core/azure/cli/core/profiles/_shared.py index 67a902ff675..d63f0d7fec4 100644 --- a/src/azure-cli-core/azure/cli/core/profiles/_shared.py +++ b/src/azure-cli-core/azure/cli/core/profiles/_shared.py @@ -102,7 +102,7 @@ class ResourceType(Enum): # pylint: disable=too-few-public-methods MGMT_CONSUMPTION = ('azure.mgmt.consumption', None) MGMT_CONTAINERINSTANCE = ('azure.mgmt.containerinstance', None) MGMT_COSMOSDB = ('azure.mgmt.cosmosdb', None) - MGMT_DATALAKE_ANALYTICS = ('azure.mgmt.datalake.analytics', None) + MGMT_DATALAKE_ANALYTICS = ('azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics', None) MGMT_DATALAKE_STORE = ('azure.mgmt.datalake.store', None) MGMT_DATAMIGRATION = ('azure.mgmt.datamigration', None) MGMT_EVENTGRID = ('azure.mgmt.eventgrid', None) diff --git a/src/azure-cli/azure/cli/command_modules/dla/_client_factory.py b/src/azure-cli/azure/cli/command_modules/dla/_client_factory.py index 5eebc1ea663..b4821e44fdf 100644 --- a/src/azure-cli/azure/cli/command_modules/dla/_client_factory.py +++ b/src/azure-cli/azure/cli/command_modules/dla/_client_factory.py @@ -6,37 +6,37 @@ def cf_dla_account(cli_ctx, _): from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azure.mgmt.datalake.analytics.account import DataLakeAnalyticsAccountManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.account import DataLakeAnalyticsAccountManagementClient return get_mgmt_service_client(cli_ctx, DataLakeAnalyticsAccountManagementClient).account def cf_dla_account_firewall(cli_ctx, _): from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azure.mgmt.datalake.analytics.account import DataLakeAnalyticsAccountManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.account import DataLakeAnalyticsAccountManagementClient return get_mgmt_service_client(cli_ctx, DataLakeAnalyticsAccountManagementClient).firewall_rules def cf_dla_account_compute_policy(cli_ctx, _): from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azure.mgmt.datalake.analytics.account import DataLakeAnalyticsAccountManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.account import DataLakeAnalyticsAccountManagementClient return get_mgmt_service_client(cli_ctx, DataLakeAnalyticsAccountManagementClient).compute_policies def cf_dla_account_storage(cli_ctx, _): from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azure.mgmt.datalake.analytics.account import DataLakeAnalyticsAccountManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.account import DataLakeAnalyticsAccountManagementClient return get_mgmt_service_client(cli_ctx, DataLakeAnalyticsAccountManagementClient).storage_accounts def cf_dla_account_adls(cli_ctx, _): from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azure.mgmt.datalake.analytics.account import DataLakeAnalyticsAccountManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.account import DataLakeAnalyticsAccountManagementClient return get_mgmt_service_client(cli_ctx, DataLakeAnalyticsAccountManagementClient).data_lake_store_accounts def cf_dla_catalog(cli_ctx, _): from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azure.mgmt.datalake.analytics.catalog import DataLakeAnalyticsCatalogManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.catalog import 
DataLakeAnalyticsCatalogManagementClient return get_mgmt_service_client( cli_ctx, DataLakeAnalyticsCatalogManagementClient, @@ -48,7 +48,7 @@ def cf_dla_catalog(cli_ctx, _): def cf_dla_job(cli_ctx, _): from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azure.mgmt.datalake.analytics.job import DataLakeAnalyticsJobManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.job import DataLakeAnalyticsJobManagementClient return get_mgmt_service_client( cli_ctx, DataLakeAnalyticsJobManagementClient, @@ -60,7 +60,7 @@ def cf_dla_job(cli_ctx, _): def cf_dla_job_recurrence(cli_ctx, _): from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azure.mgmt.datalake.analytics.job import DataLakeAnalyticsJobManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.job import DataLakeAnalyticsJobManagementClient return get_mgmt_service_client( cli_ctx, DataLakeAnalyticsJobManagementClient, @@ -72,7 +72,7 @@ def cf_dla_job_recurrence(cli_ctx, _): def cf_dla_job_pipeline(cli_ctx, _): from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azure.mgmt.datalake.analytics.job import DataLakeAnalyticsJobManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.job import DataLakeAnalyticsJobManagementClient return get_mgmt_service_client( cli_ctx, DataLakeAnalyticsJobManagementClient, diff --git a/src/azure-cli/azure/cli/command_modules/dla/_params.py b/src/azure-cli/azure/cli/command_modules/dla/_params.py index 3aba3b4896f..bd57c48b825 100644 --- a/src/azure-cli/azure/cli/command_modules/dla/_params.py +++ b/src/azure-cli/azure/cli/command_modules/dla/_params.py @@ -14,10 +14,9 @@ # pylint: disable=line-too-long, too-many-statements def load_arguments(self, _): - from azure.mgmt.datalake.analytics.account.models import (FirewallState, TierType, FirewallAllowAzureIpsState, - AADObjectType) + from .vendored_sdks.azure_mgmt_datalake_analytics.account.models import (FirewallState, TierType, FirewallAllowAzureIpsState, AADObjectType) - from azure.mgmt.datalake.analytics.job.models import (CompileMode, JobState, JobResult) + from .vendored_sdks.azure_mgmt_datalake_analytics.job.models import (CompileMode, JobState, JobResult) datalake_analytics_name_type = CLIArgumentType(help='Name of the Data Lake Analytics account.', options_list=('--account_name',), completer=get_resource_name_completion_list('Microsoft.DataLakeAnalytics/accounts'), id_part='name') diff --git a/src/azure-cli/azure/cli/command_modules/dla/_validators.py b/src/azure-cli/azure/cli/command_modules/dla/_validators.py index 93a5c7c52a2..866c4d8c7d7 100644 --- a/src/azure-cli/azure/cli/command_modules/dla/_validators.py +++ b/src/azure-cli/azure/cli/command_modules/dla/_validators.py @@ -28,7 +28,7 @@ def _get_resource_group_from_account_name(client, account_name): # COMMAND NAMESPACE VALIDATORS def validate_resource_group_name(cmd, ns): - from azure.mgmt.datalake.analytics.account import DataLakeAnalyticsAccountManagementClient + from .vendored_sdks.azure_mgmt_datalake_analytics.account import DataLakeAnalyticsAccountManagementClient if not ns.resource_group_name: account_name = ns.account_name client = get_mgmt_service_client(cmd.cli_ctx, DataLakeAnalyticsAccountManagementClient).account diff --git a/src/azure-cli/azure/cli/command_modules/dla/commands.py b/src/azure-cli/azure/cli/command_modules/dla/commands.py index 4f9aa494567..1d6c143eb91 100644 --- a/src/azure-cli/azure/cli/command_modules/dla/commands.py +++ 
b/src/azure-cli/azure/cli/command_modules/dla/commands.py @@ -21,7 +21,7 @@ # pylint: disable=too-many-statements def load_command_table(self, _): - adla_format_path = 'azure.mgmt.datalake.analytics.{}.operations.{}#{}.{{}}' + adla_format_path = 'azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.{}.operations.{}#{}.{{}}' dla_account_sdk = CliCommandType( operations_tmpl=adla_format_path.format('account', 'account_operations', 'AccountOperations'), diff --git a/src/azure-cli/azure/cli/command_modules/dla/custom.py b/src/azure-cli/azure/cli/command_modules/dla/custom.py index 60d9911fb11..6fad1217209 100644 --- a/src/azure-cli/azure/cli/command_modules/dla/custom.py +++ b/src/azure-cli/azure/cli/command_modules/dla/custom.py @@ -66,7 +66,7 @@ def list_adla_jobs(client, account_name, top=500, name=None, submitter=None, sub def create_adla_account(cmd, client, resource_group_name, account_name, default_data_lake_store, location=None, tags=None, max_degree_of_parallelism=30, max_job_count=3, query_store_retention=30, tier=None): - from azure.mgmt.datalake.analytics.account.models import DataLakeAnalyticsAccount, DataLakeStoreAccountInfo + from .vendored_sdks.azure_mgmt_datalake_analytics.account.models import DataLakeAnalyticsAccount, DataLakeStoreAccountInfo adls_list = [] adls_list.append(DataLakeStoreAccountInfo(default_data_lake_store)) location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name) @@ -85,7 +85,7 @@ def create_adla_account(cmd, client, resource_group_name, account_name, default_ def update_adla_account(client, account_name, resource_group_name, tags=None, max_degree_of_parallelism=None, max_job_count=None, query_store_retention=None, tier=None, firewall_state=None, allow_azure_ips=None): - from azure.mgmt.datalake.analytics.account.models import DataLakeAnalyticsAccountUpdateParameters + from .vendored_sdks.azure_mgmt_datalake_analytics.account.models import DataLakeAnalyticsAccountUpdateParameters update_params = DataLakeAnalyticsAccountUpdateParameters( tags=tags, max_degree_of_parallelism=max_degree_of_parallelism, @@ -120,7 +120,7 @@ def update_adla_blob_storage(client, account_name, storage_account_name, access_ # region firewall def add_adla_firewall_rule(client, account_name, firewall_rule_name, start_ip_address, end_ip_address, resource_group_name): - from azure.mgmt.datalake.analytics.account.models import FirewallRule + from .vendored_sdks.azure_mgmt_datalake_analytics.account.models import FirewallRule create_params = FirewallRule(start_ip_address, end_ip_address) return client.create_or_update(resource_group_name, account_name, @@ -132,7 +132,7 @@ def add_adla_firewall_rule(client, account_name, firewall_rule_name, start_ip_ad # region compute policy def create_adla_compute_policy(client, account_name, compute_policy_name, object_id, object_type, resource_group_name, max_dop_per_job=None, min_priority_per_job=None): - from azure.mgmt.datalake.analytics.account.models import ComputePolicyCreateOrUpdateParameters + from .vendored_sdks.azure_mgmt_datalake_analytics.account.models import ComputePolicyCreateOrUpdateParameters if not max_dop_per_job and not min_priority_per_job: raise CLIError('Please specify at least one of --max-dop-per-job and --min-priority-per-job') @@ -173,7 +173,7 @@ def update_adla_compute_policy(client, account_name, compute_policy_name, resour # region catalog def create_adla_catalog_credential(client, account_name, database_name, credential_name, credential_user_name, uri, 
credential_user_password=None): - from azure.mgmt.datalake.analytics.catalog.models import DataLakeAnalyticsCatalogCredentialCreateParameters + from .vendored_sdks.azure_mgmt_datalake_analytics.catalog.models import DataLakeAnalyticsCatalogCredentialCreateParameters if not credential_user_password: try: credential_user_password = prompt_pass('Password:', confirm=True) @@ -189,7 +189,7 @@ def create_adla_catalog_credential(client, account_name, database_name, credenti def update_adla_catalog_credential(client, account_name, database_name, credential_name, credential_user_name, uri, credential_user_password=None, new_credential_user_password=None): - from azure.mgmt.datalake.analytics.catalog.models import DataLakeAnalyticsCatalogCredentialUpdateParameters + from .vendored_sdks.azure_mgmt_datalake_analytics.catalog.models import DataLakeAnalyticsCatalogCredentialUpdateParameters if not credential_user_password: try: credential_user_password = prompt_pass('Current Password:', confirm=True) @@ -252,7 +252,7 @@ def list_catalog_table_statistics(client, account_name, database_name, schema_na def submit_adla_job(client, account_name, job_name, script, runtime_version=None, compile_mode=None, compile_only=False, degree_of_parallelism=1, priority=1000, recurrence_id=None, recurrence_name=None, pipeline_id=None, pipeline_name=None, pipeline_uri=None, run_id=None): - from azure.mgmt.datalake.analytics.job.models import ( + from .vendored_sdks.azure_mgmt_datalake_analytics.job.models import ( JobType, CreateJobParameters, BuildJobParameters, CreateUSqlJobProperties, JobRelationshipProperties) if not script: @@ -292,7 +292,7 @@ def submit_adla_job(client, account_name, job_name, script, runtime_version=None def wait_adla_job(client, account_name, job_id, wait_interval_sec=5, max_wait_time_sec=-1): - from azure.mgmt.datalake.analytics.job.models import JobState + from .vendored_sdks.azure_mgmt_datalake_analytics.job.models import JobState if wait_interval_sec < 1: raise CLIError('wait times must be greater than 0 when polling jobs. Value specified: {}' .format(wait_interval_sec)) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/__init__.py new file mode 100644 index 00000000000..68ee4bfbbe7 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- +# pylint: skip-file +# flake8: noqa diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/__init__.py new file mode 100644 index 00000000000..d0fcdd00a3f --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/__init__.py @@ -0,0 +1,8 @@ +from .account import DataLakeAnalyticsAccountManagementClient +from .catalog import DataLakeAnalyticsCatalogManagementClient +from .job import DataLakeAnalyticsJobManagementClient + +from .version import VERSION + +__version__ = VERSION + diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/__init__.py new file mode 100644 index 00000000000..cf9b64c2000 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/__init__.py @@ -0,0 +1,18 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .data_lake_analytics_account_management_client import DataLakeAnalyticsAccountManagementClient +from .version import VERSION + +__all__ = ['DataLakeAnalyticsAccountManagementClient'] + +__version__ = VERSION + diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/data_lake_analytics_account_management_client.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/data_lake_analytics_account_management_client.py new file mode 100644 index 00000000000..af67d64f38a --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/data_lake_analytics_account_management_client.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.service_client import ServiceClient +from msrest import Serializer, Deserializer +from msrestazure import AzureConfiguration +from .version import VERSION +from .operations.compute_policies_operations import ComputePoliciesOperations +from .operations.firewall_rules_operations import FirewallRulesOperations +from .operations.storage_accounts_operations import StorageAccountsOperations +from .operations.data_lake_store_accounts_operations import DataLakeStoreAccountsOperations +from .operations.account_operations import AccountOperations +from . 
import models + + +class DataLakeAnalyticsAccountManagementClientConfiguration(AzureConfiguration): + """Configuration for DataLakeAnalyticsAccountManagementClient + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credentials: Credentials needed for the client to connect to Azure. + :type credentials: :mod:`A msrestazure Credentials + object` + :param subscription_id: Get subscription credentials which uniquely + identify Microsoft Azure subscription. The subscription ID forms part of + the URI for every service call. + :type subscription_id: str + :param str base_url: Service URL + """ + + def __init__( + self, credentials, subscription_id, base_url=None): + + if credentials is None: + raise ValueError("Parameter 'credentials' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + if not isinstance(subscription_id, str): + raise TypeError("Parameter 'subscription_id' must be str.") + if not base_url: + base_url = 'https://management.azure.com' + + super(DataLakeAnalyticsAccountManagementClientConfiguration, self).__init__(base_url) + + self.add_user_agent('datalakeanalyticsaccountmanagementclient/{}'.format(VERSION)) + self.add_user_agent('Azure-SDK-For-Python') + + self.credentials = credentials + self.subscription_id = subscription_id + + +class DataLakeAnalyticsAccountManagementClient(object): + """Creates an Azure Data Lake Analytics account management client. + + :ivar config: Configuration for client. + :vartype config: DataLakeAnalyticsAccountManagementClientConfiguration + + :ivar compute_policies: ComputePolicies operations + :vartype compute_policies: azure.mgmt.datalake.analytics.account.operations.ComputePoliciesOperations + :ivar firewall_rules: FirewallRules operations + :vartype firewall_rules: azure.mgmt.datalake.analytics.account.operations.FirewallRulesOperations + :ivar storage_accounts: StorageAccounts operations + :vartype storage_accounts: azure.mgmt.datalake.analytics.account.operations.StorageAccountsOperations + :ivar data_lake_store_accounts: DataLakeStoreAccounts operations + :vartype data_lake_store_accounts: azure.mgmt.datalake.analytics.account.operations.DataLakeStoreAccountsOperations + :ivar account: Account operations + :vartype account: azure.mgmt.datalake.analytics.account.operations.AccountOperations + + :param credentials: Credentials needed for the client to connect to Azure. + :type credentials: :mod:`A msrestazure Credentials + object` + :param subscription_id: Get subscription credentials which uniquely + identify Microsoft Azure subscription. The subscription ID forms part of + the URI for every service call. 
+ :type subscription_id: str + :param str base_url: Service URL + """ + + def __init__( + self, credentials, subscription_id, base_url=None): + + self.config = DataLakeAnalyticsAccountManagementClientConfiguration(credentials, subscription_id, base_url) + self._client = ServiceClient(self.config.credentials, self.config) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self.api_version = '2016-11-01' + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + + self.compute_policies = ComputePoliciesOperations( + self._client, self.config, self._serialize, self._deserialize) + self.firewall_rules = FirewallRulesOperations( + self._client, self.config, self._serialize, self._deserialize) + self.storage_accounts = StorageAccountsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.data_lake_store_accounts = DataLakeStoreAccountsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.account = AccountOperations( + self._client, self.config, self._serialize, self._deserialize) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/__init__.py new file mode 100644 index 00000000000..873f6b045f4 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/__init__.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .storage_account_info import StorageAccountInfo +from .storage_container import StorageContainer +from .sas_token_info import SasTokenInfo +from .data_lake_store_account_info import DataLakeStoreAccountInfo +from .firewall_rule import FirewallRule +from .compute_policy_account_create_parameters import ComputePolicyAccountCreateParameters +from .compute_policy import ComputePolicy +from .add_data_lake_store_parameters import AddDataLakeStoreParameters +from .add_storage_account_parameters import AddStorageAccountParameters +from .update_storage_account_parameters import UpdateStorageAccountParameters +from .compute_policy_create_or_update_parameters import ComputePolicyCreateOrUpdateParameters +from .data_lake_analytics_account_update_parameters import DataLakeAnalyticsAccountUpdateParameters +from .data_lake_analytics_account_properties_basic import DataLakeAnalyticsAccountPropertiesBasic +from .data_lake_analytics_account_basic import DataLakeAnalyticsAccountBasic +from .data_lake_analytics_account import DataLakeAnalyticsAccount +from .update_firewall_rule_parameters import UpdateFirewallRuleParameters +from .resource import Resource +from .optional_sub_resource import OptionalSubResource +from .sub_resource import SubResource +from .compute_policy_paged import ComputePolicyPaged +from .firewall_rule_paged import FirewallRulePaged +from .storage_container_paged import StorageContainerPaged +from .sas_token_info_paged import SasTokenInfoPaged +from .storage_account_info_paged import StorageAccountInfoPaged +from .data_lake_store_account_info_paged import DataLakeStoreAccountInfoPaged +from .data_lake_analytics_account_basic_paged import DataLakeAnalyticsAccountBasicPaged +from .data_lake_analytics_account_management_client_enums import ( + TierType, + FirewallState, + FirewallAllowAzureIpsState, + AADObjectType, + DataLakeAnalyticsAccountStatus, + DataLakeAnalyticsAccountState, +) + +__all__ = [ + 'StorageAccountInfo', + 'StorageContainer', + 'SasTokenInfo', + 'DataLakeStoreAccountInfo', + 'FirewallRule', + 'ComputePolicyAccountCreateParameters', + 'ComputePolicy', + 'AddDataLakeStoreParameters', + 'AddStorageAccountParameters', + 'UpdateStorageAccountParameters', + 'ComputePolicyCreateOrUpdateParameters', + 'DataLakeAnalyticsAccountUpdateParameters', + 'DataLakeAnalyticsAccountPropertiesBasic', + 'DataLakeAnalyticsAccountBasic', + 'DataLakeAnalyticsAccount', + 'UpdateFirewallRuleParameters', + 'Resource', + 'OptionalSubResource', + 'SubResource', + 'ComputePolicyPaged', + 'FirewallRulePaged', + 'StorageContainerPaged', + 'SasTokenInfoPaged', + 'StorageAccountInfoPaged', + 'DataLakeStoreAccountInfoPaged', + 'DataLakeAnalyticsAccountBasicPaged', + 'TierType', + 'FirewallState', + 'FirewallAllowAzureIpsState', + 'AADObjectType', + 'DataLakeAnalyticsAccountStatus', + 'DataLakeAnalyticsAccountState', +] diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/add_data_lake_store_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/add_data_lake_store_parameters.py new file mode 100644 index 00000000000..5453e3da739 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/add_data_lake_store_parameters.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# 
Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AddDataLakeStoreParameters(Model): + """Additional Data Lake Store parameters. + + :param suffix: the optional suffix for the Data Lake Store account. + :type suffix: str + """ + + _attribute_map = { + 'suffix': {'key': 'properties.suffix', 'type': 'str'}, + } + + def __init__(self, suffix=None): + self.suffix = suffix diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/add_storage_account_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/add_storage_account_parameters.py new file mode 100644 index 00000000000..957513bb71e --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/add_storage_account_parameters.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AddStorageAccountParameters(Model): + """Storage account parameters for a storage account being added to a Data Lake + Analytics account. + + :param access_key: the access key associated with this Azure Storage + account that will be used to connect to it. + :type access_key: str + :param suffix: the optional suffix for the storage account. + :type suffix: str + """ + + _validation = { + 'access_key': {'required': True}, + } + + _attribute_map = { + 'access_key': {'key': 'properties.accessKey', 'type': 'str'}, + 'suffix': {'key': 'properties.suffix', 'type': 'str'}, + } + + def __init__(self, access_key, suffix=None): + self.access_key = access_key + self.suffix = suffix diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy.py new file mode 100644 index 00000000000..107da8fb74d --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ComputePolicy(Model): + """The parameters used to create a new compute policy. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: The name of the compute policy + :vartype name: str + :ivar object_id: The AAD object identifier for the entity to create a + policy for. + :vartype object_id: str + :ivar object_type: The type of AAD object the object identifier refers to. + Possible values include: 'User', 'Group', 'ServicePrincipal' + :vartype object_type: str or :class:`AADObjectType + ` + :param max_degree_of_parallelism_per_job: The maximum degree of + parallelism per job this user can use to submit jobs. + :type max_degree_of_parallelism_per_job: int + :param min_priority_per_job: The minimum priority per job this user can + use to submit jobs. + :type min_priority_per_job: int + """ + + _validation = { + 'name': {'readonly': True}, + 'object_id': {'readonly': True}, + 'object_type': {'readonly': True}, + 'max_degree_of_parallelism_per_job': {'minimum': 1}, + 'min_priority_per_job': {'minimum': 1}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'object_id': {'key': 'properties.objectId', 'type': 'str'}, + 'object_type': {'key': 'properties.objectType', 'type': 'str'}, + 'max_degree_of_parallelism_per_job': {'key': 'properties.maxDegreeOfParallelismPerJob', 'type': 'int'}, + 'min_priority_per_job': {'key': 'properties.minPriorityPerJob', 'type': 'int'}, + } + + def __init__(self, max_degree_of_parallelism_per_job=None, min_priority_per_job=None): + self.name = None + self.object_id = None + self.object_type = None + self.max_degree_of_parallelism_per_job = max_degree_of_parallelism_per_job + self.min_priority_per_job = min_priority_per_job diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy_account_create_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy_account_create_parameters.py new file mode 100644 index 00000000000..9bb85f34de5 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy_account_create_parameters.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ComputePolicyAccountCreateParameters(Model): + """The parameters used to create a new compute policy. + + :param name: The unique name of the policy to create + :type name: str + :param object_id: The AAD object identifier for the entity to create a + policy for. + :type object_id: str + :param object_type: The type of AAD object the object identifier refers + to. 
Possible values include: 'User', 'Group', 'ServicePrincipal' + :type object_type: str or :class:`AADObjectType + ` + :param max_degree_of_parallelism_per_job: The maximum degree of + parallelism per job this user can use to submit jobs. This property, the + min priority per job property, or both must be passed. + :type max_degree_of_parallelism_per_job: int + :param min_priority_per_job: The minimum priority per job this user can + use to submit jobs. This property, the max degree of parallelism per job + property, or both must be passed. + :type min_priority_per_job: int + """ + + _validation = { + 'name': {'required': True}, + 'object_id': {'required': True}, + 'object_type': {'required': True}, + 'max_degree_of_parallelism_per_job': {'minimum': 1}, + 'min_priority_per_job': {'minimum': 1}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'object_id': {'key': 'properties.objectId', 'type': 'str'}, + 'object_type': {'key': 'properties.objectType', 'type': 'str'}, + 'max_degree_of_parallelism_per_job': {'key': 'properties.maxDegreeOfParallelismPerJob', 'type': 'int'}, + 'min_priority_per_job': {'key': 'properties.minPriorityPerJob', 'type': 'int'}, + } + + def __init__(self, name, object_id, object_type, max_degree_of_parallelism_per_job=None, min_priority_per_job=None): + self.name = name + self.object_id = object_id + self.object_type = object_type + self.max_degree_of_parallelism_per_job = max_degree_of_parallelism_per_job + self.min_priority_per_job = min_priority_per_job diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy_create_or_update_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy_create_or_update_parameters.py new file mode 100644 index 00000000000..2a711a2166f --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy_create_or_update_parameters.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ComputePolicyCreateOrUpdateParameters(Model): + """The parameters used to create a new compute policy. + + :param object_id: The AAD object identifier for the entity to create a + policy for. + :type object_id: str + :param object_type: The type of AAD object the object identifier refers + to. Possible values include: 'User', 'Group', 'ServicePrincipal' + :type object_type: str or :class:`AADObjectType + ` + :param max_degree_of_parallelism_per_job: The maximum degree of + parallelism per job this user can use to submit jobs. This property, the + min priority per job property, or both must be passed. + :type max_degree_of_parallelism_per_job: int + :param min_priority_per_job: The minimum priority per job this user can + use to submit jobs. This property, the max degree of parallelism per job + property, or both must be passed. 
+ :type min_priority_per_job: int + """ + + _validation = { + 'object_id': {'required': True}, + 'object_type': {'required': True}, + 'max_degree_of_parallelism_per_job': {'minimum': 1}, + 'min_priority_per_job': {'minimum': 1}, + } + + _attribute_map = { + 'object_id': {'key': 'properties.objectId', 'type': 'str'}, + 'object_type': {'key': 'properties.objectType', 'type': 'str'}, + 'max_degree_of_parallelism_per_job': {'key': 'properties.maxDegreeOfParallelismPerJob', 'type': 'int'}, + 'min_priority_per_job': {'key': 'properties.minPriorityPerJob', 'type': 'int'}, + } + + def __init__(self, object_id, object_type, max_degree_of_parallelism_per_job=None, min_priority_per_job=None): + self.object_id = object_id + self.object_type = object_type + self.max_degree_of_parallelism_per_job = max_degree_of_parallelism_per_job + self.min_priority_per_job = min_priority_per_job diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy_paged.py new file mode 100644 index 00000000000..3cd24ed75e1 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/compute_policy_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class ComputePolicyPaged(Paged): + """ + A paging container for iterating over a list of :class:`ComputePolicy ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[ComputePolicy]'} + } + + def __init__(self, *args, **kwargs): + + super(ComputePolicyPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account.py new file mode 100644 index 00000000000..28708bcd73a --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account.py @@ -0,0 +1,198 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource import Resource + + +class DataLakeAnalyticsAccount(Resource): + """A Data Lake Analytics account object, containing all information associated + with the named Data Lake Analytics account. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar id: Resource Id + :vartype id: str + :ivar name: Resource name + :vartype name: str + :ivar type: Resource type + :vartype type: str + :param location: Resource location + :type location: str + :param tags: Resource tags + :type tags: dict + :ivar provisioning_state: the provisioning status of the Data Lake + Analytics account. Possible values include: 'Failed', 'Creating', + 'Running', 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', + 'Deleted' + :vartype provisioning_state: str or :class:`DataLakeAnalyticsAccountStatus + ` + :ivar state: the state of the Data Lake Analytics account. Possible values + include: 'Active', 'Suspended' + :vartype state: str or :class:`DataLakeAnalyticsAccountState + ` + :ivar creation_time: the account creation time. + :vartype creation_time: datetime + :ivar last_modified_time: the account last modified time. + :vartype last_modified_time: datetime + :ivar endpoint: the full CName endpoint for this account. + :vartype endpoint: str + :ivar account_id: The unique identifier associated with this Data Lake + Analytics account. + :vartype account_id: str + :param default_data_lake_store_account: the default data lake storage + account associated with this Data Lake Analytics account. + :type default_data_lake_store_account: str + :param max_degree_of_parallelism: the maximum supported degree of + parallelism for this account. Default value: 30 . + :type max_degree_of_parallelism: int + :param query_store_retention: the number of days that job metadata is + retained. Default value: 30 . + :type query_store_retention: int + :param max_job_count: the maximum supported jobs running under the account + at the same time. Default value: 3 . + :type max_job_count: int + :ivar system_max_degree_of_parallelism: the system defined maximum + supported degree of parallelism for this account, which restricts the + maximum value of parallelism the user can set for the account.. + :vartype system_max_degree_of_parallelism: int + :ivar system_max_job_count: the system defined maximum supported jobs + running under the account at the same time, which restricts the maximum + number of running jobs the user can set for the account. + :vartype system_max_job_count: int + :param data_lake_store_accounts: the list of Data Lake storage accounts + associated with this account. + :type data_lake_store_accounts: list of :class:`DataLakeStoreAccountInfo + ` + :param storage_accounts: the list of Azure Blob storage accounts + associated with this account. + :type storage_accounts: list of :class:`StorageAccountInfo + ` + :param new_tier: the commitment tier for the next month. Possible values + include: 'Consumption', 'Commitment_100AUHours', 'Commitment_500AUHours', + 'Commitment_1000AUHours', 'Commitment_5000AUHours', + 'Commitment_10000AUHours', 'Commitment_50000AUHours', + 'Commitment_100000AUHours', 'Commitment_500000AUHours' + :type new_tier: str or :class:`TierType + ` + :ivar current_tier: the commitment tier in use for the current month. + Possible values include: 'Consumption', 'Commitment_100AUHours', + 'Commitment_500AUHours', 'Commitment_1000AUHours', + 'Commitment_5000AUHours', 'Commitment_10000AUHours', + 'Commitment_50000AUHours', 'Commitment_100000AUHours', + 'Commitment_500000AUHours' + :vartype current_tier: str or :class:`TierType + ` + :param firewall_state: The current state of the IP address firewall for + this Data Lake Analytics account. 
Possible values include: 'Enabled', + 'Disabled' + :type firewall_state: str or :class:`FirewallState + ` + :param firewall_allow_azure_ips: The current state of allowing or + disallowing IPs originating within Azure through the firewall. If the + firewall is disabled, this is not enforced. Possible values include: + 'Enabled', 'Disabled' + :type firewall_allow_azure_ips: str or :class:`FirewallAllowAzureIpsState + ` + :param firewall_rules: The list of firewall rules associated with this + Data Lake Analytics account. + :type firewall_rules: list of :class:`FirewallRule + ` + :param max_degree_of_parallelism_per_job: the maximum supported degree of + parallelism per job for this account. + :type max_degree_of_parallelism_per_job: int + :param min_priority_per_job: the minimum supported priority per job for + this account. + :type min_priority_per_job: int + :param compute_policies: the list of compute policies to create in this + account. + :type compute_policies: list of + :class:`ComputePolicyAccountCreateParameters + ` + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'state': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'endpoint': {'readonly': True}, + 'account_id': {'readonly': True}, + 'default_data_lake_store_account': {'required': True}, + 'max_degree_of_parallelism': {'minimum': 1}, + 'query_store_retention': {'maximum': 180, 'minimum': 1}, + 'max_job_count': {'minimum': 1}, + 'system_max_degree_of_parallelism': {'readonly': True}, + 'system_max_job_count': {'readonly': True}, + 'data_lake_store_accounts': {'required': True}, + 'current_tier': {'readonly': True}, + 'max_degree_of_parallelism_per_job': {'minimum': 1}, + 'min_priority_per_job': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'DataLakeAnalyticsAccountStatus'}, + 'state': {'key': 'properties.state', 'type': 'DataLakeAnalyticsAccountState'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'account_id': {'key': 'properties.accountId', 'type': 'str'}, + 'default_data_lake_store_account': {'key': 'properties.defaultDataLakeStoreAccount', 'type': 'str'}, + 'max_degree_of_parallelism': {'key': 'properties.maxDegreeOfParallelism', 'type': 'int'}, + 'query_store_retention': {'key': 'properties.queryStoreRetention', 'type': 'int'}, + 'max_job_count': {'key': 'properties.maxJobCount', 'type': 'int'}, + 'system_max_degree_of_parallelism': {'key': 'properties.systemMaxDegreeOfParallelism', 'type': 'int'}, + 'system_max_job_count': {'key': 'properties.systemMaxJobCount', 'type': 'int'}, + 'data_lake_store_accounts': {'key': 'properties.dataLakeStoreAccounts', 'type': '[DataLakeStoreAccountInfo]'}, + 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccountInfo]'}, + 'new_tier': {'key': 'properties.newTier', 'type': 'TierType'}, + 'current_tier': {'key': 'properties.currentTier', 'type': 'TierType'}, + 'firewall_state': {'key': 'properties.firewallState', 
'type': 'FirewallState'}, + 'firewall_allow_azure_ips': {'key': 'properties.firewallAllowAzureIps', 'type': 'FirewallAllowAzureIpsState'}, + 'firewall_rules': {'key': 'properties.firewallRules', 'type': '[FirewallRule]'}, + 'max_degree_of_parallelism_per_job': {'key': 'properties.maxDegreeOfParallelismPerJob', 'type': 'int'}, + 'min_priority_per_job': {'key': 'properties.minPriorityPerJob', 'type': 'int'}, + 'compute_policies': {'key': 'properties.computePolicies', 'type': '[ComputePolicyAccountCreateParameters]'}, + } + + def __init__(self, location, default_data_lake_store_account, data_lake_store_accounts, tags=None, max_degree_of_parallelism=30, query_store_retention=30, max_job_count=3, storage_accounts=None, new_tier=None, firewall_state=None, firewall_allow_azure_ips=None, firewall_rules=None, max_degree_of_parallelism_per_job=None, min_priority_per_job=None, compute_policies=None): + super(DataLakeAnalyticsAccount, self).__init__(location=location, tags=tags) + self.provisioning_state = None + self.state = None + self.creation_time = None + self.last_modified_time = None + self.endpoint = None + self.account_id = None + self.default_data_lake_store_account = default_data_lake_store_account + self.max_degree_of_parallelism = max_degree_of_parallelism + self.query_store_retention = query_store_retention + self.max_job_count = max_job_count + self.system_max_degree_of_parallelism = None + self.system_max_job_count = None + self.data_lake_store_accounts = data_lake_store_accounts + self.storage_accounts = storage_accounts + self.new_tier = new_tier + self.current_tier = None + self.firewall_state = firewall_state + self.firewall_allow_azure_ips = firewall_allow_azure_ips + self.firewall_rules = firewall_rules + self.max_degree_of_parallelism_per_job = max_degree_of_parallelism_per_job + self.min_priority_per_job = min_priority_per_job + self.compute_policies = compute_policies diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_basic.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_basic.py new file mode 100644 index 00000000000..61b694f43d5 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_basic.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource import Resource + + +class DataLakeAnalyticsAccountBasic(Resource): + """A Data Lake Analytics account object, containing all information associated + with the named Data Lake Analytics account. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar id: Resource Id + :vartype id: str + :ivar name: Resource name + :vartype name: str + :ivar type: Resource type + :vartype type: str + :param location: Resource location + :type location: str + :param tags: Resource tags + :type tags: dict + :ivar provisioning_state: the provisioning status of the Data Lake + Analytics account. Possible values include: 'Failed', 'Creating', + 'Running', 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', + 'Deleted' + :vartype provisioning_state: str or :class:`DataLakeAnalyticsAccountStatus + ` + :ivar state: the state of the Data Lake Analytics account. Possible values + include: 'Active', 'Suspended' + :vartype state: str or :class:`DataLakeAnalyticsAccountState + ` + :ivar creation_time: the account creation time. + :vartype creation_time: datetime + :ivar last_modified_time: the account last modified time. + :vartype last_modified_time: datetime + :ivar endpoint: the full CName endpoint for this account. + :vartype endpoint: str + :ivar account_id: The unique identifier associated with this Data Lake + Analytics account. + :vartype account_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'state': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'endpoint': {'readonly': True}, + 'account_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'DataLakeAnalyticsAccountStatus'}, + 'state': {'key': 'properties.state', 'type': 'DataLakeAnalyticsAccountState'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'account_id': {'key': 'properties.accountId', 'type': 'str'}, + } + + def __init__(self, location, tags=None): + super(DataLakeAnalyticsAccountBasic, self).__init__(location=location, tags=tags) + self.provisioning_state = None + self.state = None + self.creation_time = None + self.last_modified_time = None + self.endpoint = None + self.account_id = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_basic_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_basic_paged.py new file mode 100644 index 00000000000..279867b941e --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_basic_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class DataLakeAnalyticsAccountBasicPaged(Paged): + """ + A paging container for iterating over a list of :class:`DataLakeAnalyticsAccountBasic ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[DataLakeAnalyticsAccountBasic]'} + } + + def __init__(self, *args, **kwargs): + + super(DataLakeAnalyticsAccountBasicPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_management_client_enums.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_management_client_enums.py new file mode 100644 index 00000000000..ed4a500305c --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_management_client_enums.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum + + +class TierType(Enum): + + consumption = "Consumption" + commitment_100_au_hours = "Commitment_100AUHours" + commitment_500_au_hours = "Commitment_500AUHours" + commitment_1000_au_hours = "Commitment_1000AUHours" + commitment_5000_au_hours = "Commitment_5000AUHours" + commitment_10000_au_hours = "Commitment_10000AUHours" + commitment_50000_au_hours = "Commitment_50000AUHours" + commitment_100000_au_hours = "Commitment_100000AUHours" + commitment_500000_au_hours = "Commitment_500000AUHours" + + +class FirewallState(Enum): + + enabled = "Enabled" + disabled = "Disabled" + + +class FirewallAllowAzureIpsState(Enum): + + enabled = "Enabled" + disabled = "Disabled" + + +class AADObjectType(Enum): + + user = "User" + group = "Group" + service_principal = "ServicePrincipal" + + +class DataLakeAnalyticsAccountStatus(Enum): + + failed = "Failed" + creating = "Creating" + running = "Running" + succeeded = "Succeeded" + patching = "Patching" + suspending = "Suspending" + resuming = "Resuming" + deleting = "Deleting" + deleted = "Deleted" + + +class DataLakeAnalyticsAccountState(Enum): + + active = "Active" + suspended = "Suspended" diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_properties_basic.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_properties_basic.py new file mode 100644 index 00000000000..9cfae99acc4 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_properties_basic.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DataLakeAnalyticsAccountPropertiesBasic(Model): + """The basic account specific properties that are associated with an + underlying Data Lake Analytics account. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar provisioning_state: the provisioning status of the Data Lake + Analytics account. Possible values include: 'Failed', 'Creating', + 'Running', 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', + 'Deleted' + :vartype provisioning_state: str or :class:`DataLakeAnalyticsAccountStatus + ` + :ivar state: the state of the Data Lake Analytics account. Possible values + include: 'Active', 'Suspended' + :vartype state: str or :class:`DataLakeAnalyticsAccountState + ` + :ivar creation_time: the account creation time. + :vartype creation_time: datetime + :ivar last_modified_time: the account last modified time. + :vartype last_modified_time: datetime + :ivar endpoint: the full CName endpoint for this account. + :vartype endpoint: str + :ivar account_id: The unique identifier associated with this Data Lake + Analytics account. + :vartype account_id: str + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'state': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'endpoint': {'readonly': True}, + 'account_id': {'readonly': True}, + } + + _attribute_map = { + 'provisioning_state': {'key': 'provisioningState', 'type': 'DataLakeAnalyticsAccountStatus'}, + 'state': {'key': 'state', 'type': 'DataLakeAnalyticsAccountState'}, + 'creation_time': {'key': 'creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'lastModifiedTime', 'type': 'iso-8601'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'account_id': {'key': 'accountId', 'type': 'str'}, + } + + def __init__(self): + self.provisioning_state = None + self.state = None + self.creation_time = None + self.last_modified_time = None + self.endpoint = None + self.account_id = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_update_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_update_parameters.py new file mode 100644 index 00000000000..4d9f461391d --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_analytics_account_update_parameters.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DataLakeAnalyticsAccountUpdateParameters(Model): + """The parameters that can be used to update an existing Data Lake Analytics + account. + + :param tags: Resource tags + :type tags: dict + :param max_degree_of_parallelism: the maximum supported degree of + parallelism for this account. + :type max_degree_of_parallelism: int + :param query_store_retention: the number of days that job metadata is + retained. + :type query_store_retention: int + :param max_job_count: the maximum supported jobs running under the account + at the same time. + :type max_job_count: int + :param new_tier: the commitment tier to use for next month. Possible + values include: 'Consumption', 'Commitment_100AUHours', + 'Commitment_500AUHours', 'Commitment_1000AUHours', + 'Commitment_5000AUHours', 'Commitment_10000AUHours', + 'Commitment_50000AUHours', 'Commitment_100000AUHours', + 'Commitment_500000AUHours' + :type new_tier: str or :class:`TierType + ` + :param firewall_state: The current state of the IP address firewall for + this Data Lake Analytics account. Possible values include: 'Enabled', + 'Disabled' + :type firewall_state: str or :class:`FirewallState + ` + :param firewall_allow_azure_ips: The current state of allowing or + disallowing IPs originating within Azure through the firewall. If the + firewall is disabled, this is not enforced. Possible values include: + 'Enabled', 'Disabled' + :type firewall_allow_azure_ips: str or :class:`FirewallAllowAzureIpsState + ` + :param firewall_rules: The list of firewall rules associated with this + Data Lake Analytics account. + :type firewall_rules: list of :class:`FirewallRule + ` + :param max_degree_of_parallelism_per_job: the maximum supported degree of + parallelism per job for this account. + :type max_degree_of_parallelism_per_job: int + :param min_priority_per_job: the minimum supported priority per job for + this account. + :type min_priority_per_job: int + :param compute_policies: the list of existing compute policies to update + in this account. 
+ :type compute_policies: list of :class:`ComputePolicy + ` + """ + + _validation = { + 'max_degree_of_parallelism': {'minimum': 1}, + 'query_store_retention': {'maximum': 180, 'minimum': 1}, + 'max_job_count': {'minimum': 1}, + 'max_degree_of_parallelism_per_job': {'minimum': 1}, + 'min_priority_per_job': {'minimum': 1}, + } + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'max_degree_of_parallelism': {'key': 'properties.maxDegreeOfParallelism', 'type': 'int'}, + 'query_store_retention': {'key': 'properties.queryStoreRetention', 'type': 'int'}, + 'max_job_count': {'key': 'properties.maxJobCount', 'type': 'int'}, + 'new_tier': {'key': 'properties.newTier', 'type': 'TierType'}, + 'firewall_state': {'key': 'properties.firewallState', 'type': 'FirewallState'}, + 'firewall_allow_azure_ips': {'key': 'properties.firewallAllowAzureIps', 'type': 'FirewallAllowAzureIpsState'}, + 'firewall_rules': {'key': 'properties.firewallRules', 'type': '[FirewallRule]'}, + 'max_degree_of_parallelism_per_job': {'key': 'properties.maxDegreeOfParallelismPerJob', 'type': 'int'}, + 'min_priority_per_job': {'key': 'properties.minPriorityPerJob', 'type': 'int'}, + 'compute_policies': {'key': 'properties.computePolicies', 'type': '[ComputePolicy]'}, + } + + def __init__(self, tags=None, max_degree_of_parallelism=None, query_store_retention=None, max_job_count=None, new_tier=None, firewall_state=None, firewall_allow_azure_ips=None, firewall_rules=None, max_degree_of_parallelism_per_job=None, min_priority_per_job=None, compute_policies=None): + self.tags = tags + self.max_degree_of_parallelism = max_degree_of_parallelism + self.query_store_retention = query_store_retention + self.max_job_count = max_job_count + self.new_tier = new_tier + self.firewall_state = firewall_state + self.firewall_allow_azure_ips = firewall_allow_azure_ips + self.firewall_rules = firewall_rules + self.max_degree_of_parallelism_per_job = max_degree_of_parallelism_per_job + self.min_priority_per_job = min_priority_per_job + self.compute_policies = compute_policies diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_store_account_info.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_store_account_info.py new file mode 100644 index 00000000000..7edb5f34586 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_store_account_info.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class DataLakeStoreAccountInfo(SubResource): + """Data Lake Store account information. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param suffix: the optional suffix for the Data Lake Store account. 
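# Usage sketch, not part of the diff: building the update-parameters model
# defined above together with the vendored enums from
# data_lake_analytics_account_management_client_enums.py. The direct module
# imports reflect the vendored package layout introduced by this change.
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.account.models.data_lake_analytics_account_update_parameters import DataLakeAnalyticsAccountUpdateParameters
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.account.models.data_lake_analytics_account_management_client_enums import TierType, FirewallState

# Cap parallelism, move to a commitment tier and enable the IP firewall in one PATCH body.
update_params = DataLakeAnalyticsAccountUpdateParameters(
    max_degree_of_parallelism=32,
    new_tier=TierType.commitment_100_au_hours,
    firewall_state=FirewallState.enabled,
)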
+ :type suffix: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'required': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'suffix': {'key': 'properties.suffix', 'type': 'str'}, + } + + def __init__(self, name, suffix=None): + super(DataLakeStoreAccountInfo, self).__init__(name=name) + self.suffix = suffix diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_store_account_info_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_store_account_info_paged.py new file mode 100644 index 00000000000..e3f82dc99f3 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/data_lake_store_account_info_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class DataLakeStoreAccountInfoPaged(Paged): + """ + A paging container for iterating over a list of :class:`DataLakeStoreAccountInfo ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[DataLakeStoreAccountInfo]'} + } + + def __init__(self, *args, **kwargs): + + super(DataLakeStoreAccountInfoPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/firewall_rule.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/firewall_rule.py new file mode 100644 index 00000000000..a8c56398f0e --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/firewall_rule.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .optional_sub_resource import OptionalSubResource + + +class FirewallRule(OptionalSubResource): + """Data Lake Analytics firewall rule information. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param start_ip_address: the start IP address for the firewall rule. This + can be either ipv4 or ipv6. Start and End should be in the same protocol. + :type start_ip_address: str + :param end_ip_address: the end IP address for the firewall rule. This can + be either ipv4 or ipv6. 
Start and End should be in the same protocol. + :type end_ip_address: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'start_ip_address': {'required': True}, + 'end_ip_address': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'start_ip_address': {'key': 'properties.startIpAddress', 'type': 'str'}, + 'end_ip_address': {'key': 'properties.endIpAddress', 'type': 'str'}, + } + + def __init__(self, start_ip_address, end_ip_address, name=None): + super(FirewallRule, self).__init__(name=name) + self.start_ip_address = start_ip_address + self.end_ip_address = end_ip_address diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/firewall_rule_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/firewall_rule_paged.py new file mode 100644 index 00000000000..20614418048 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/firewall_rule_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class FirewallRulePaged(Paged): + """ + A paging container for iterating over a list of :class:`FirewallRule ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[FirewallRule]'} + } + + def __init__(self, *args, **kwargs): + + super(FirewallRulePaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/optional_sub_resource.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/optional_sub_resource.py new file mode 100644 index 00000000000..c2f462fb376 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/optional_sub_resource.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OptionalSubResource(Model): + """The Resource model definition for a nested resource with no required + properties. + + Variables are only populated by the server, and will be ignored when + sending a request. 
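# Usage sketch, not part of the diff: constructing the FirewallRule model
# defined above. The start and end addresses must use the same protocol
# (both IPv4 or both IPv6); the values below are illustrative only.
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.account.models.firewall_rule import FirewallRule

rule = FirewallRule(
    start_ip_address='10.0.0.1',
    end_ip_address='10.0.0.254',
    name='allow-internal-range',
)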
+ + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, name=None): + self.id = None + self.name = name + self.type = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/resource.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/resource.py new file mode 100644 index 00000000000..18861c15a8b --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/resource.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Resource(Model): + """The Resource model definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :ivar name: Resource name + :vartype name: str + :ivar type: Resource type + :vartype type: str + :param location: Resource location + :type location: str + :param tags: Resource tags + :type tags: dict + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, location, tags=None): + self.id = None + self.name = None + self.type = None + self.location = location + self.tags = tags diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/sas_token_info.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/sas_token_info.py new file mode 100644 index 00000000000..7c7e2dfa277 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/sas_token_info.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SasTokenInfo(Model): + """SAS token information. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar access_token: the access token for the associated Azure Storage + Container. + :vartype access_token: str + """ + + _validation = { + 'access_token': {'readonly': True}, + } + + _attribute_map = { + 'access_token': {'key': 'accessToken', 'type': 'str'}, + } + + def __init__(self): + self.access_token = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/sas_token_info_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/sas_token_info_paged.py new file mode 100644 index 00000000000..42eb70a3150 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/sas_token_info_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class SasTokenInfoPaged(Paged): + """ + A paging container for iterating over a list of :class:`SasTokenInfo ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[SasTokenInfo]'} + } + + def __init__(self, *args, **kwargs): + + super(SasTokenInfoPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_account_info.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_account_info.py new file mode 100644 index 00000000000..fbbc04119b2 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_account_info.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class StorageAccountInfo(SubResource): + """Azure Storage account information. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + :param access_key: the access key associated with this Azure Storage + account that will be used to connect to it. + :type access_key: str + :param suffix: the optional suffix for the storage account. 
+ :type suffix: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'required': True}, + 'type': {'readonly': True}, + 'access_key': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key': {'key': 'properties.accessKey', 'type': 'str'}, + 'suffix': {'key': 'properties.suffix', 'type': 'str'}, + } + + def __init__(self, name, access_key, suffix=None): + super(StorageAccountInfo, self).__init__(name=name) + self.access_key = access_key + self.suffix = suffix diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_account_info_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_account_info_paged.py new file mode 100644 index 00000000000..ed2c0704083 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_account_info_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class StorageAccountInfoPaged(Paged): + """ + A paging container for iterating over a list of :class:`StorageAccountInfo ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[StorageAccountInfo]'} + } + + def __init__(self, *args, **kwargs): + + super(StorageAccountInfoPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_container.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_container.py new file mode 100644 index 00000000000..7280a7e2f18 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_container.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StorageContainer(Model): + """Azure Storage blob container information. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: the unique identifier of the blob container. + :vartype id: str + :ivar name: the name of the blob container. + :vartype name: str + :ivar type: the type of the blob container. + :vartype type: str + :ivar last_modified_time: the last modified time of the blob container. 
+ :vartype last_modified_time: datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + } + + def __init__(self): + self.id = None + self.name = None + self.type = None + self.last_modified_time = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_container_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_container_paged.py new file mode 100644 index 00000000000..c18b0555a33 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/storage_container_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class StorageContainerPaged(Paged): + """ + A paging container for iterating over a list of :class:`StorageContainer ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[StorageContainer]'} + } + + def __init__(self, *args, **kwargs): + + super(StorageContainerPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/sub_resource.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/sub_resource.py new file mode 100644 index 00000000000..101d908394c --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/sub_resource.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SubResource(Model): + """The Sub Resource model definition. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar id: Resource Id + :vartype id: str + :param name: Resource name + :type name: str + :ivar type: Resource type + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'required': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, name): + self.id = None + self.name = name + self.type = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/update_firewall_rule_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/update_firewall_rule_parameters.py new file mode 100644 index 00000000000..430c4987446 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/update_firewall_rule_parameters.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UpdateFirewallRuleParameters(Model): + """Data Lake Analytics firewall rule update parameters. + + :param start_ip_address: the start IP address for the firewall rule. This + can be either ipv4 or ipv6. Start and End should be in the same protocol. + :type start_ip_address: str + :param end_ip_address: the end IP address for the firewall rule. This can + be either ipv4 or ipv6. Start and End should be in the same protocol. + :type end_ip_address: str + """ + + _attribute_map = { + 'start_ip_address': {'key': 'properties.startIpAddress', 'type': 'str'}, + 'end_ip_address': {'key': 'properties.endIpAddress', 'type': 'str'}, + } + + def __init__(self, start_ip_address=None, end_ip_address=None): + self.start_ip_address = start_ip_address + self.end_ip_address = end_ip_address diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/update_storage_account_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/update_storage_account_parameters.py new file mode 100644 index 00000000000..df778702bf2 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/models/update_storage_account_parameters.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UpdateStorageAccountParameters(Model): + """Storage account parameters for a storage account being updated in a Data + Lake Analytics account. 
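# Usage sketch, not part of the diff: the update-parameter models above are
# plain payload objects; attributes left as None are omitted by the msrest
# serializer, so only the fields that are set end up in the PATCH body.
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.account.models.update_firewall_rule_parameters import UpdateFirewallRuleParameters

# Widen an existing rule's range without renaming it; the addresses are illustrative.
patch_body = UpdateFirewallRuleParameters(start_ip_address='10.0.0.1', end_ip_address='10.0.1.254')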
+ + :param access_key: the updated access key associated with this Azure + Storage account that will be used to connect to it. + :type access_key: str + :param suffix: the optional suffix for the storage account. + :type suffix: str + """ + + _attribute_map = { + 'access_key': {'key': 'properties.accessKey', 'type': 'str'}, + 'suffix': {'key': 'properties.suffix', 'type': 'str'}, + } + + def __init__(self, access_key=None, suffix=None): + self.access_key = access_key + self.suffix = suffix diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/__init__.py new file mode 100644 index 00000000000..cceba2aec10 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/__init__.py @@ -0,0 +1,24 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .compute_policies_operations import ComputePoliciesOperations +from .firewall_rules_operations import FirewallRulesOperations +from .storage_accounts_operations import StorageAccountsOperations +from .data_lake_store_accounts_operations import DataLakeStoreAccountsOperations +from .account_operations import AccountOperations + +__all__ = [ + 'ComputePoliciesOperations', + 'FirewallRulesOperations', + 'StorageAccountsOperations', + 'DataLakeStoreAccountsOperations', + 'AccountOperations', +] diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/account_operations.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/account_operations.py new file mode 100644 index 00000000000..54ceca3684b --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/account_operations.py @@ -0,0 +1,595 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrestazure.azure_operation import AzureOperationPoller + +from .. import models + + +class AccountOperations(object): + """AccountOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An objec model deserializer. + :ivar api_version: Client Api Version. Constant value: "2016-11-01". 
+ """ + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2016-11-01" + + self.config = config + + def list_by_resource_group( + self, resource_group_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Gets the first page of Data Lake Analytics accounts, if any, within a + specific resource group. This includes a link to the next page, if any. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of + :class:`DataLakeAnalyticsAccountBasic + ` + :rtype: :class:`DataLakeAnalyticsAccountBasicPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.DataLakeAnalyticsAccountBasicPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.DataLakeAnalyticsAccountBasicPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def list( + self, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Gets the first page of Data Lake Analytics accounts, if any, within the + current subscription. This includes a link to the next page, if any. + + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. 
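# Usage sketch, not part of the diff: list_by_resource_group above returns a
# DataLakeAnalyticsAccountBasicPaged container; iterating it follows nextLink
# transparently via internal_paging. `accounts_ops` is a hypothetical
# AccountOperations instance, and `name` assumes the Basic model exposes the
# Resource fields shown earlier.
for account in accounts_ops.list_by_resource_group('my-resource-group', top=50):
    print(account.name)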
One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of + :class:`DataLakeAnalyticsAccountBasic + ` + :rtype: :class:`DataLakeAnalyticsAccountBasicPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/subscriptions/{subscriptionId}/providers/Microsoft.DataLakeAnalytics/accounts' + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.DataLakeAnalyticsAccountBasicPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.DataLakeAnalyticsAccountBasicPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def create( + self, resource_group_name, account_name, parameters, custom_headers=None, raw=False, **operation_config): + """Creates the specified Data Lake Analytics account. This supplies the + user with computation services for Data Lake Analytics workloads. 
+ + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account.the account will be + associated with. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + create. + :type account_name: str + :param parameters: Parameters supplied to the create Data Lake + Analytics account operation. + :type parameters: :class:`DataLakeAnalyticsAccount + ` + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :return: + :class:`AzureOperationPoller` + instance that returns :class:`DataLakeAnalyticsAccount + ` + or :class:`ClientRawResponse` if + raw=true + :rtype: + :class:`AzureOperationPoller` + or :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsAccount') + + # Construct and send request + def long_running_send(): + + request = self._client.put(url, query_parameters) + return self._client.send( + request, header_parameters, body_content, **operation_config) + + def get_long_running_status(status_link, headers=None): + + request = self._client.get(status_link) + if headers: + request.headers.update(headers) + return self._client.send( + request, header_parameters, **operation_config) + + def get_long_running_output(response): + + if response.status_code not in [200, 201]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + if response.status_code == 201: + deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + if raw: + response = long_running_send() + return get_long_running_output(response) + + long_running_operation_timeout = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + return AzureOperationPoller( + long_running_send, get_long_running_output, + get_long_running_status, long_running_operation_timeout) + + def update( + self, resource_group_name, account_name, 
parameters=None, custom_headers=None, raw=False, **operation_config): + """Updates the Data Lake Analytics account object specified by the + accountName with the contents of the account object. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + update. + :type account_name: str + :param parameters: Parameters supplied to the update Data Lake + Analytics account operation. + :type parameters: :class:`DataLakeAnalyticsAccountUpdateParameters + ` + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :return: + :class:`AzureOperationPoller` + instance that returns :class:`DataLakeAnalyticsAccount + ` + or :class:`ClientRawResponse` if + raw=true + :rtype: + :class:`AzureOperationPoller` + or :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsAccountUpdateParameters') + else: + body_content = None + + # Construct and send request + def long_running_send(): + + request = self._client.patch(url, query_parameters) + return self._client.send( + request, header_parameters, body_content, **operation_config) + + def get_long_running_status(status_link, headers=None): + + request = self._client.get(status_link) + if headers: + request.headers.update(headers) + return self._client.send( + request, header_parameters, **operation_config) + + def get_long_running_output(response): + + if response.status_code not in [200, 201]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + if response.status_code == 201: + deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + if raw: + response = long_running_send() + return get_long_running_output(response) + + long_running_operation_timeout = operation_config.get( + 'long_running_operation_timeout', + 
self.config.long_running_operation_timeout) + return AzureOperationPoller( + long_running_send, get_long_running_output, + get_long_running_status, long_running_operation_timeout) + + def delete( + self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config): + """Begins the delete process for the Data Lake Analytics account object + specified by the account name. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + delete + :type account_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :return: + :class:`AzureOperationPoller` + instance that returns None or + :class:`ClientRawResponse` if + raw=true + :rtype: + :class:`AzureOperationPoller` + or :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + def long_running_send(): + + request = self._client.delete(url, query_parameters) + return self._client.send(request, header_parameters, **operation_config) + + def get_long_running_status(status_link, headers=None): + + request = self._client.get(status_link) + if headers: + request.headers.update(headers) + return self._client.send( + request, header_parameters, **operation_config) + + def get_long_running_output(response): + + if response.status_code not in [200, 202, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + if raw: + response = long_running_send() + return get_long_running_output(response) + + long_running_operation_timeout = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + return AzureOperationPoller( + long_running_send, get_long_running_output, + get_long_running_status, long_running_operation_timeout) + + def get( + self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config): + """Gets details of the specified Data Lake Analytics account. 
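# Usage sketch, not part of the diff: create, update and delete above return
# an msrestazure AzureOperationPoller; result() blocks until the long-running
# operation completes. `accounts_ops` is a hypothetical AccountOperations
# instance obtained from the management client.
poller = accounts_ops.delete('my-resource-group', 'myadlaaccount')
poller.result()        # waits for the service to finish deleting the account
assert poller.done()   # True once the operation has completed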
+ + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + retrieve. + :type account_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`DataLakeAnalyticsAccount + ` + or :class:`ClientRawResponse` if + raw=true + :rtype: :class:`DataLakeAnalyticsAccount + ` + or :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/compute_policies_operations.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/compute_policies_operations.py new file mode 100644 index 00000000000..1dae436a6af --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/compute_policies_operations.py @@ -0,0 +1,412 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. 
import models + + +class ComputePoliciesOperations(object): + """ComputePoliciesOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An objec model deserializer. + :ivar api_version: Client Api Version. Constant value: "2016-11-01". + """ + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2016-11-01" + + self.config = config + + def create_or_update( + self, resource_group_name, account_name, compute_policy_name, parameters, custom_headers=None, raw=False, **operation_config): + """Creates or updates the specified compute policy. During update, the + compute policy with the specified name will be replaced with this new + compute policy. An account supports, at most, 50 policies. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + add or replace the compute policy. + :type account_name: str + :param compute_policy_name: The name of the compute policy to create + or update. + :type compute_policy_name: str + :param parameters: Parameters supplied to create or update the compute + policy. The max degree of parallelism per job property, min priority + per job property, or both must be present. + :type parameters: :class:`ComputePolicyCreateOrUpdateParameters + ` + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`ComputePolicy + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`ComputePolicy + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies/{computePolicyName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'computePolicyName': self._serialize.url("compute_policy_name", compute_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'ComputePolicyCreateOrUpdateParameters') + + # Construct and send request + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ComputePolicy', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def update( + self, resource_group_name, account_name, compute_policy_name, max_degree_of_parallelism_per_job=None, min_priority_per_job=None, custom_headers=None, raw=False, **operation_config): + """Updates the specified compute policy. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + which to update the compute policy. + :type account_name: str + :param compute_policy_name: The name of the compute policy to update. + :type compute_policy_name: str + :param max_degree_of_parallelism_per_job: The maximum degree of + parallelism per job this user can use to submit jobs. + :type max_degree_of_parallelism_per_job: int + :param min_priority_per_job: The minimum priority per job this user + can use to submit jobs. + :type min_priority_per_job: int + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
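+
+        Example (a minimal sketch, not part of the generated operation;
+        ``client.compute_policies`` is assumed to be this operations group
+        on a configured management client)::
+
+            # hypothetical resource and policy names
+            updated = client.compute_policies.update(
+                'my-rg', 'myadlaacct', 'policy1',
+                max_degree_of_parallelism_per_job=8,
+                min_priority_per_job=100)
+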
+ :return: :class:`ComputePolicy + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`ComputePolicy + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + parameters = None + if max_degree_of_parallelism_per_job is not None or min_priority_per_job is not None: + parameters = models.ComputePolicy(max_degree_of_parallelism_per_job=max_degree_of_parallelism_per_job, min_priority_per_job=min_priority_per_job) + + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies/{computePolicyName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'computePolicyName': self._serialize.url("compute_policy_name", compute_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'ComputePolicy') + else: + body_content = None + + # Construct and send request + request = self._client.patch(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ComputePolicy', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def delete( + self, resource_group_name, account_name, compute_policy_name, custom_headers=None, raw=False, **operation_config): + """Deletes the specified compute policy from the specified Data Lake + Analytics account. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to delete the compute policy. + :type account_name: str + :param compute_policy_name: The name of the compute policy to delete. + :type compute_policy_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies/{computePolicyName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'computePolicyName': self._serialize.url("compute_policy_name", compute_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def get( + self, resource_group_name, account_name, compute_policy_name, custom_headers=None, raw=False, **operation_config): + """Gets the specified Data Lake Analytics compute policy. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to get the compute policy. + :type account_name: str + :param compute_policy_name: The name of the compute policy to + retrieve. + :type compute_policy_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`ComputePolicy + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`ComputePolicy + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies/{computePolicyName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'computePolicyName': self._serialize.url("compute_policy_name", compute_policy_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ComputePolicy', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_by_account( + self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config): + """Lists the Data Lake Analytics compute policies within the specified + Data Lake Analytics account. An account supports, at most, 50 policies. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to get the compute policies. + :type account_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
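+
+        Example (a minimal sketch, not part of the generated operation; the
+        paged result is iterable, and the ``name`` and
+        ``max_degree_of_parallelism_per_job`` attributes are assumed on the
+        ComputePolicy model)::
+
+            # hypothetical resource names
+            for policy in client.compute_policies.list_by_account('my-rg', 'myadlaacct'):
+                print(policy.name, policy.max_degree_of_parallelism_per_job)
+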
+ :return: An iterator like instance of :class:`ComputePolicy + ` + :rtype: :class:`ComputePolicyPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/computePolicies' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.ComputePolicyPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.ComputePolicyPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/data_lake_store_accounts_operations.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/data_lake_store_accounts_operations.py new file mode 100644 index 00000000000..e7146df9c79 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/data_lake_store_accounts_operations.py @@ -0,0 +1,354 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class DataLakeStoreAccountsOperations(object): + """DataLakeStoreAccountsOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An objec model deserializer. 
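+
+    Example (a minimal sketch, not part of the generated class; this
+    operations group is assumed to be exposed as
+    ``client.data_lake_store_accounts`` on a configured
+    DataLakeAnalyticsAccountManagementClient)::
+
+        # hypothetical resource names: link an existing Data Lake Store
+        # account, then enumerate the linked accounts
+        client.data_lake_store_accounts.add('my-rg', 'myadlaacct', 'myadlsacct')
+        for adls in client.data_lake_store_accounts.list_by_account('my-rg', 'myadlaacct'):
+            print(adls.name)
+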
+ :ivar api_version: Client Api Version. Constant value: "2016-11-01". + """ + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2016-11-01" + + self.config = config + + def add( + self, resource_group_name, account_name, data_lake_store_account_name, suffix=None, custom_headers=None, raw=False, **operation_config): + """Updates the specified Data Lake Analytics account to include the + additional Data Lake Store account. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + which to add the Data Lake Store account. + :type account_name: str + :param data_lake_store_account_name: The name of the Data Lake Store + account to add. + :type data_lake_store_account_name: str + :param suffix: the optional suffix for the Data Lake Store account. + :type suffix: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + parameters = None + if suffix is not None: + parameters = models.AddDataLakeStoreParameters(suffix=suffix) + + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/DataLakeStoreAccounts/{dataLakeStoreAccountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'dataLakeStoreAccountName': self._serialize.url("data_lake_store_account_name", data_lake_store_account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'AddDataLakeStoreParameters') + else: + body_content = None + + # Construct and send request + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def delete( + self, resource_group_name, account_name, data_lake_store_account_name, 
custom_headers=None, raw=False, **operation_config): + """Updates the Data Lake Analytics account specified to remove the + specified Data Lake Store account. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to remove the Data Lake Store account. + :type account_name: str + :param data_lake_store_account_name: The name of the Data Lake Store + account to remove + :type data_lake_store_account_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/DataLakeStoreAccounts/{dataLakeStoreAccountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'dataLakeStoreAccountName': self._serialize.url("data_lake_store_account_name", data_lake_store_account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def get( + self, resource_group_name, account_name, data_lake_store_account_name, custom_headers=None, raw=False, **operation_config): + """Gets the specified Data Lake Store account details in the specified + Data Lake Analytics account. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to retrieve the Data Lake Store account details. 
+ :type account_name: str + :param data_lake_store_account_name: The name of the Data Lake Store + account to retrieve + :type data_lake_store_account_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`DataLakeStoreAccountInfo + ` + or :class:`ClientRawResponse` if + raw=true + :rtype: :class:`DataLakeStoreAccountInfo + ` + or :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/DataLakeStoreAccounts/{dataLakeStoreAccountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'dataLakeStoreAccountName': self._serialize.url("data_lake_store_account_name", data_lake_store_account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('DataLakeStoreAccountInfo', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_by_account( + self, resource_group_name, account_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Gets the first page of Data Lake Store accounts linked to the specified + Data Lake Analytics account. The response includes a link to the next + page, if any. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account for + which to list Data Lake Store accounts. + :type account_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. 
+ :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`DataLakeStoreAccountInfo + ` + :rtype: :class:`DataLakeStoreAccountInfoPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/DataLakeStoreAccounts/' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.DataLakeStoreAccountInfoPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.DataLakeStoreAccountInfoPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized diff --git 
a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/firewall_rules_operations.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/firewall_rules_operations.py new file mode 100644 index 00000000000..abe419d78f8 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/firewall_rules_operations.py @@ -0,0 +1,412 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class FirewallRulesOperations(object): + """FirewallRulesOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An objec model deserializer. + :ivar api_version: Client Api Version. Constant value: "2016-11-01". + """ + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2016-11-01" + + self.config = config + + def create_or_update( + self, resource_group_name, account_name, firewall_rule_name, parameters, custom_headers=None, raw=False, **operation_config): + """Creates or updates the specified firewall rule. During update, the + firewall rule with the specified name will be replaced with this new + firewall rule. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + add or replace the firewall rule. + :type account_name: str + :param firewall_rule_name: The name of the firewall rule to create or + update. + :type firewall_rule_name: str + :param parameters: Parameters supplied to create or update the + firewall rule. + :type parameters: :class:`FirewallRule + ` + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
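+
+        Example (a minimal sketch, not part of the generated operation; the
+        FirewallRule model is assumed to take ``start_ip_address`` and
+        ``end_ip_address``, matching the update parameters used elsewhere
+        in this file)::
+
+            # hypothetical resource and rule names
+            rule = models.FirewallRule(
+                start_ip_address='10.0.0.1',
+                end_ip_address='10.0.0.255')
+            created = client.firewall_rules.create_or_update(
+                'my-rg', 'myadlaacct', 'allow-office', rule)
+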
+ :return: :class:`FirewallRule + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`FirewallRule + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules/{firewallRuleName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'firewallRuleName': self._serialize.url("firewall_rule_name", firewall_rule_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'FirewallRule') + + # Construct and send request + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('FirewallRule', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def update( + self, resource_group_name, account_name, firewall_rule_name, start_ip_address=None, end_ip_address=None, custom_headers=None, raw=False, **operation_config): + """Updates the specified firewall rule. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + which to update the firewall rule. + :type account_name: str + :param firewall_rule_name: The name of the firewall rule to update. + :type firewall_rule_name: str + :param start_ip_address: the start IP address for the firewall rule. + This can be either ipv4 or ipv6. Start and End should be in the same + protocol. + :type start_ip_address: str + :param end_ip_address: the end IP address for the firewall rule. This + can be either ipv4 or ipv6. Start and End should be in the same + protocol. + :type end_ip_address: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`FirewallRule + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`FirewallRule + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + parameters = None + if start_ip_address is not None or end_ip_address is not None: + parameters = models.UpdateFirewallRuleParameters(start_ip_address=start_ip_address, end_ip_address=end_ip_address) + + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules/{firewallRuleName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'firewallRuleName': self._serialize.url("firewall_rule_name", firewall_rule_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'UpdateFirewallRuleParameters') + else: + body_content = None + + # Construct and send request + request = self._client.patch(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('FirewallRule', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def delete( + self, resource_group_name, account_name, firewall_rule_name, custom_headers=None, raw=False, **operation_config): + """Deletes the specified firewall rule from the specified Data Lake + Analytics account. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to delete the firewall rule. + :type account_name: str + :param firewall_rule_name: The name of the firewall rule to delete. + :type firewall_rule_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules/{firewallRuleName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'firewallRuleName': self._serialize.url("firewall_rule_name", firewall_rule_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def get( + self, resource_group_name, account_name, firewall_rule_name, custom_headers=None, raw=False, **operation_config): + """Gets the specified Data Lake Analytics firewall rule. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to get the firewall rule. + :type account_name: str + :param firewall_rule_name: The name of the firewall rule to retrieve. + :type firewall_rule_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
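+
+        Example of ``raw=True`` (a minimal sketch, not part of the generated
+        operation; the ``response`` and ``output`` attributes on msrest's
+        ClientRawResponse are assumed)::
+
+            # hypothetical resource and rule names
+            raw_result = client.firewall_rules.get(
+                'my-rg', 'myadlaacct', 'rule1', raw=True)
+            status = raw_result.response.status_code  # underlying HTTP response
+            rule = raw_result.output                  # deserialized FirewallRule
+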
+ :return: :class:`FirewallRule + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`FirewallRule + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules/{firewallRuleName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'firewallRuleName': self._serialize.url("firewall_rule_name", firewall_rule_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('FirewallRule', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_by_account( + self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config): + """Lists the Data Lake Analytics firewall rules within the specified Data + Lake Analytics account. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to get the firewall rules. + :type account_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of :class:`FirewallRule + ` + :rtype: :class:`FirewallRulePaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/firewallRules' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.FirewallRulePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.FirewallRulePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/storage_accounts_operations.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/storage_accounts_operations.py new file mode 100644 index 00000000000..76481dbb529 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/operations/storage_accounts_operations.py @@ -0,0 +1,667 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class StorageAccountsOperations(object): + """StorageAccountsOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An objec model deserializer. + :ivar api_version: Client Api Version. 
Constant value: "2016-11-01". + """ + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2016-11-01" + + self.config = config + + def add( + self, resource_group_name, account_name, storage_account_name, access_key, suffix=None, custom_headers=None, raw=False, **operation_config): + """Updates the specified Data Lake Analytics account to add an Azure + Storage account. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + which to add the Azure Storage account. + :type account_name: str + :param storage_account_name: The name of the Azure Storage account to + add + :type storage_account_name: str + :param access_key: the access key associated with this Azure Storage + account that will be used to connect to it. + :type access_key: str + :param suffix: the optional suffix for the storage account. + :type suffix: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + parameters = models.AddStorageAccountParameters(access_key=access_key, suffix=suffix) + + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'storageAccountName': self._serialize.url("storage_account_name", storage_account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'AddStorageAccountParameters') + + # Construct and send request + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def update( + self, resource_group_name, account_name, storage_account_name, access_key=None, suffix=None, custom_headers=None, raw=False, 
**operation_config): + """Updates the Data Lake Analytics account to replace Azure Storage blob + account details, such as the access key and/or suffix. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account to + modify storage accounts in + :type account_name: str + :param storage_account_name: The Azure Storage account to modify + :type storage_account_name: str + :param access_key: the updated access key associated with this Azure + Storage account that will be used to connect to it. + :type access_key: str + :param suffix: the optional suffix for the storage account. + :type suffix: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + parameters = None + if access_key is not None or suffix is not None: + parameters = models.UpdateStorageAccountParameters(access_key=access_key, suffix=suffix) + + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'storageAccountName': self._serialize.url("storage_account_name", storage_account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'UpdateStorageAccountParameters') + else: + body_content = None + + # Construct and send request + request = self._client.patch(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def delete( + self, resource_group_name, account_name, storage_account_name, custom_headers=None, raw=False, **operation_config): + """Updates the specified Data Lake Analytics account to remove an Azure + Storage account. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. 
+ :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to remove the Azure Storage account. + :type account_name: str + :param storage_account_name: The name of the Azure Storage account to + remove + :type storage_account_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'storageAccountName': self._serialize.url("storage_account_name", storage_account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def get( + self, resource_group_name, account_name, storage_account_name, custom_headers=None, raw=False, **operation_config): + """Gets the specified Azure Storage account linked to the given Data Lake + Analytics account. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which to retrieve Azure storage account details. + :type account_name: str + :param storage_account_name: The name of the Azure Storage account for + which to retrieve the details. + :type storage_account_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
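+
+        Example (a minimal sketch, not part of the generated operation; this
+        operations group is assumed to be exposed as
+        ``client.storage_accounts`` on a configured management client)::
+
+            # hypothetical resource names
+            info = client.storage_accounts.get(
+                'my-rg', 'myadlaacct', 'mystorageacct')
+            print(info.name)
+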
+ :return: :class:`StorageAccountInfo + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`StorageAccountInfo + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'storageAccountName': self._serialize.url("storage_account_name", storage_account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('StorageAccountInfo', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def get_storage_container( + self, resource_group_name, account_name, storage_account_name, container_name, custom_headers=None, raw=False, **operation_config): + """Gets the specified Azure Storage container associated with the given + Data Lake Analytics and Azure Storage accounts. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account for + which to retrieve blob container. + :type account_name: str + :param storage_account_name: The name of the Azure storage account + from which to retrieve the blob container. + :type storage_account_name: str + :param container_name: The name of the Azure storage container to + retrieve + :type container_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
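As a reading aid for the storage-account operations above, here is a minimal usage sketch. It is illustrative only: it assumes `client` is an already-constructed DataLakeAnalyticsAccountManagementClient from this vendored package with the operations class exposed as `client.storage_accounts`, and the resource names are placeholders.

    def rotate_and_verify_storage_key(client, new_key):
        # Illustrative sketch; `client` and the names below are assumptions.
        # PATCH the linked Azure Storage account with a replacement access key.
        client.storage_accounts.update(
            resource_group_name='example-rg',
            account_name='exampledla',
            storage_account_name='examplestorage',
            access_key=new_key)
        # GET returns a deserialized StorageAccountInfo on HTTP 200.
        return client.storage_accounts.get(
            'example-rg', 'exampledla', 'examplestorage')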
+ :return: :class:`StorageContainer + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`StorageContainer + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}/Containers/{containerName}' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'storageAccountName': self._serialize.url("storage_account_name", storage_account_name, 'str'), + 'containerName': self._serialize.url("container_name", container_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('StorageContainer', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_storage_containers( + self, resource_group_name, account_name, storage_account_name, custom_headers=None, raw=False, **operation_config): + """Lists the Azure Storage containers, if any, associated with the + specified Data Lake Analytics and Azure Storage account combination. + The response includes a link to the next page of results, if any. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account for + which to list Azure Storage blob containers. + :type account_name: str + :param storage_account_name: The name of the Azure storage account + from which to list blob containers. + :type storage_account_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of :class:`StorageContainer + ` + :rtype: :class:`StorageContainerPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}/Containers' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'storageAccountName': self._serialize.url("storage_account_name", storage_account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.StorageContainerPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.StorageContainerPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def list_sas_tokens( + self, resource_group_name, account_name, storage_account_name, container_name, custom_headers=None, raw=False, **operation_config): + """Gets the SAS token associated with the specified Data Lake Analytics + and Azure Storage account and container combination. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account from + which an Azure Storage account's SAS token is being requested. + :type account_name: str + :param storage_account_name: The name of the Azure storage account for + which the SAS token is being requested. + :type storage_account_name: str + :param container_name: The name of the Azure storage container for + which the SAS token is being requested. + :type container_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of :class:`SasTokenInfo + ` + :rtype: :class:`SasTokenInfoPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}/Containers/{containerName}/listSasTokens' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'storageAccountName': self._serialize.url("storage_account_name", storage_account_name, 'str'), + 'containerName': self._serialize.url("container_name", container_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.SasTokenInfoPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.SasTokenInfoPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def list_by_account( + self, resource_group_name, account_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Gets the first page of Azure Storage accounts, if any, linked to the + specified Data Lake Analytics account. The response includes a link to + the next page, if any. + + :param resource_group_name: The name of the Azure resource group that + contains the Data Lake Analytics account. + :type resource_group_name: str + :param account_name: The name of the Data Lake Analytics account for + which to list Azure Storage accounts. + :type account_name: str + :param filter: The OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. 
One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`StorageAccountInfo + ` + :rtype: :class:`StorageAccountInfoPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/' + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.StorageAccountInfoPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.StorageAccountInfoPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/version.py 
b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/version.py new file mode 100644 index 00000000000..9a6b4374370 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/account/version.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +VERSION = "0.1.6" + diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/__init__.py new file mode 100644 index 00000000000..39a62e8061a --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/__init__.py @@ -0,0 +1,18 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .data_lake_analytics_catalog_management_client import DataLakeAnalyticsCatalogManagementClient +from .version import VERSION + +__all__ = ['DataLakeAnalyticsCatalogManagementClient'] + +__version__ = VERSION + diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/data_lake_analytics_catalog_management_client.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/data_lake_analytics_catalog_management_client.py new file mode 100644 index 00000000000..729cf43a59b --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/data_lake_analytics_catalog_management_client.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.service_client import ServiceClient +from msrest import Serializer, Deserializer +from msrestazure import AzureConfiguration +from .version import VERSION +from .operations.catalog_operations import CatalogOperations +from . import models + + +class DataLakeAnalyticsCatalogManagementClientConfiguration(AzureConfiguration): + """Configuration for DataLakeAnalyticsCatalogManagementClient + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credentials: Credentials needed for the client to connect to Azure. 
+ :type credentials: :mod:`A msrestazure Credentials + object` + :param adla_catalog_dns_suffix: Gets the DNS suffix used as the base for + all Azure Data Lake Analytics Catalog service requests. + :type adla_catalog_dns_suffix: str + """ + + def __init__( + self, credentials, adla_catalog_dns_suffix): + + if credentials is None: + raise ValueError("Parameter 'credentials' must not be None.") + if adla_catalog_dns_suffix is None: + raise ValueError("Parameter 'adla_catalog_dns_suffix' must not be None.") + if not isinstance(adla_catalog_dns_suffix, str): + raise TypeError("Parameter 'adla_catalog_dns_suffix' must be str.") + base_url = 'https://{accountName}.{adlaCatalogDnsSuffix}' + + super(DataLakeAnalyticsCatalogManagementClientConfiguration, self).__init__(base_url) + + self.add_user_agent('datalakeanalyticscatalogmanagementclient/{}'.format(VERSION)) + self.add_user_agent('Azure-SDK-For-Python') + + self.credentials = credentials + self.adla_catalog_dns_suffix = adla_catalog_dns_suffix + + +class DataLakeAnalyticsCatalogManagementClient(object): + """Creates an Azure Data Lake Analytics catalog client. + + :ivar config: Configuration for client. + :vartype config: DataLakeAnalyticsCatalogManagementClientConfiguration + + :ivar catalog: Catalog operations + :vartype catalog: azure.mgmt.datalake.analytics.catalog.operations.CatalogOperations + + :param credentials: Credentials needed for the client to connect to Azure. + :type credentials: :mod:`A msrestazure Credentials + object` + :param adla_catalog_dns_suffix: Gets the DNS suffix used as the base for + all Azure Data Lake Analytics Catalog service requests. + :type adla_catalog_dns_suffix: str + """ + + def __init__( + self, credentials, adla_catalog_dns_suffix): + + self.config = DataLakeAnalyticsCatalogManagementClientConfiguration(credentials, adla_catalog_dns_suffix) + self._client = ServiceClient(self.config.credentials, self.config) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self.api_version = '2016-11-01' + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + + self.catalog = CatalogOperations( + self._client, self.config, self._serialize, self._deserialize) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/__init__.py new file mode 100644 index 00000000000..188e23fef42 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/__init__.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
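Unlike the ARM account client, this catalog client is a data-plane client: its base URL is built from the account name plus a DNS suffix ('https://{accountName}.{adlaCatalogDnsSuffix}') rather than an ARM endpoint. A construction sketch; the public suffix value and the list_databases call on CatalogOperations are assumptions not shown in this hunk:

    from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.catalog import (
        DataLakeAnalyticsCatalogManagementClient)

    def list_catalog_databases(credentials, account_name):
        # Illustrative sketch; requests go to https://{accountName}.{suffix},
        # and the suffix and operation name below are assumed values.
        client = DataLakeAnalyticsCatalogManagementClient(
            credentials, 'azuredatalakeanalytics.net')
        return [db.name for db in client.catalog.list_databases(account_name)]

The account name is supplied per operation call, which is why the configuration only needs credentials and the DNS suffix.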
+# -------------------------------------------------------------------------- + +from .data_lake_analytics_catalog_secret_create_or_update_parameters import DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters +from .data_lake_analytics_catalog_credential_create_parameters import DataLakeAnalyticsCatalogCredentialCreateParameters +from .data_lake_analytics_catalog_credential_delete_parameters import DataLakeAnalyticsCatalogCredentialDeleteParameters +from .data_lake_analytics_catalog_credential_update_parameters import DataLakeAnalyticsCatalogCredentialUpdateParameters +from .usql_secret import USqlSecret +from .usql_external_data_source import USqlExternalDataSource +from .usql_credential import USqlCredential +from .usql_procedure import USqlProcedure +from .usql_table_column import USqlTableColumn +from .usql_directed_column import USqlDirectedColumn +from .usql_distribution_info import USqlDistributionInfo +from .usql_index import USqlIndex +from .ddl_name import DdlName +from .entity_id import EntityId +from .external_table import ExternalTable +from .type_field_info import TypeFieldInfo +from .usql_table import USqlTable +from .usql_table_type import USqlTableType +from .usql_view import USqlView +from .usql_package import USqlPackage +from .usql_table_partition import USqlTablePartition +from .usql_table_statistics import USqlTableStatistics +from .usql_type import USqlType +from .usql_table_valued_function import USqlTableValuedFunction +from .usql_assembly_file_info import USqlAssemblyFileInfo +from .usql_assembly_dependency_info import USqlAssemblyDependencyInfo +from .usql_assembly import USqlAssembly +from .usql_assembly_clr import USqlAssemblyClr +from .usql_schema import USqlSchema +from .usql_database import USqlDatabase +from .catalog_item import CatalogItem +from .catalog_item_list import CatalogItemList +from .usql_credential_paged import USqlCredentialPaged +from .usql_external_data_source_paged import USqlExternalDataSourcePaged +from .usql_procedure_paged import USqlProcedurePaged +from .usql_table_paged import USqlTablePaged +from .usql_table_statistics_paged import USqlTableStatisticsPaged +from .usql_table_type_paged import USqlTableTypePaged +from .usql_package_paged import USqlPackagePaged +from .usql_view_paged import USqlViewPaged +from .usql_table_partition_paged import USqlTablePartitionPaged +from .usql_type_paged import USqlTypePaged +from .usql_table_valued_function_paged import USqlTableValuedFunctionPaged +from .usql_assembly_clr_paged import USqlAssemblyClrPaged +from .usql_schema_paged import USqlSchemaPaged +from .usql_database_paged import USqlDatabasePaged +from .data_lake_analytics_catalog_management_client_enums import ( + FileType, +) + +__all__ = [ + 'DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters', + 'DataLakeAnalyticsCatalogCredentialCreateParameters', + 'DataLakeAnalyticsCatalogCredentialDeleteParameters', + 'DataLakeAnalyticsCatalogCredentialUpdateParameters', + 'USqlSecret', + 'USqlExternalDataSource', + 'USqlCredential', + 'USqlProcedure', + 'USqlTableColumn', + 'USqlDirectedColumn', + 'USqlDistributionInfo', + 'USqlIndex', + 'DdlName', + 'EntityId', + 'ExternalTable', + 'TypeFieldInfo', + 'USqlTable', + 'USqlTableType', + 'USqlView', + 'USqlPackage', + 'USqlTablePartition', + 'USqlTableStatistics', + 'USqlType', + 'USqlTableValuedFunction', + 'USqlAssemblyFileInfo', + 'USqlAssemblyDependencyInfo', + 'USqlAssembly', + 'USqlAssemblyClr', + 'USqlSchema', + 'USqlDatabase', + 'CatalogItem', + 'CatalogItemList', + 
'USqlCredentialPaged', + 'USqlExternalDataSourcePaged', + 'USqlProcedurePaged', + 'USqlTablePaged', + 'USqlTableStatisticsPaged', + 'USqlTableTypePaged', + 'USqlPackagePaged', + 'USqlViewPaged', + 'USqlTablePartitionPaged', + 'USqlTypePaged', + 'USqlTableValuedFunctionPaged', + 'USqlAssemblyClrPaged', + 'USqlSchemaPaged', + 'USqlDatabasePaged', + 'FileType', +] diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/catalog_item.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/catalog_item.py new file mode 100644 index 00000000000..cce66f94f3a --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/catalog_item.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CatalogItem(Model): + """A Data Lake Analytics catalog item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None): + self.compute_account_name = compute_account_name + self.version = version diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/catalog_item_list.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/catalog_item_list.py new file mode 100644 index 00000000000..6931091e6f8 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/catalog_item_list.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CatalogItemList(Model): + """A Data Lake Analytics catalog item list. + + :param next_link: the link to the next page of results. 
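The _attribute_map on each model is what drives msrest (de)serialization: it maps the Python attribute names to the camelCase wire keys, and the Serializer/Deserializer pair built by the clients above consumes exactly these maps. A quick sketch using the CatalogItem base model just defined; the serialized dict shown in the comment is approximate:

    from msrest import Serializer
    from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.catalog.models import CatalogItem

    # Illustrative sketch mirroring how the generated clients serialize bodies.
    item = CatalogItem(compute_account_name='exampledla', version='1')
    serializer = Serializer({'CatalogItem': CatalogItem})
    payload = serializer.body(item, 'CatalogItem')
    # payload is a plain dict keyed by the wire names, roughly:
    # {'computeAccountName': 'exampledla', 'version': '1'}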
+ :type next_link: str + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, next_link=None): + self.next_link = next_link diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_credential_create_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_credential_create_parameters.py new file mode 100644 index 00000000000..a82a7a43c0c --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_credential_create_parameters.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DataLakeAnalyticsCatalogCredentialCreateParameters(Model): + """Data Lake Analytics catalog credential creation parameters. + + :param password: the password for the credential and user with access to + the data source. + :type password: str + :param uri: the URI identifier for the data source this credential can + connect to in the format : + :type uri: str + :param user_id: the object identifier for the user associated with this + credential with access to the data source. + :type user_id: str + """ + + _validation = { + 'password': {'required': True}, + 'uri': {'required': True}, + 'user_id': {'required': True}, + } + + _attribute_map = { + 'password': {'key': 'password', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'user_id': {'key': 'userId', 'type': 'str'}, + } + + def __init__(self, password, uri, user_id): + self.password = password + self.uri = uri + self.user_id = user_id diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_credential_delete_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_credential_delete_parameters.py new file mode 100644 index 00000000000..e4b9d7154ff --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_credential_delete_parameters.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DataLakeAnalyticsCatalogCredentialDeleteParameters(Model): + """Data Lake Analytics catalog credential deletion parameters. 
+ + :param password: the current password for the credential and user with + access to the data source. This is required if the requester is not the + account owner. + :type password: str + """ + + _attribute_map = { + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__(self, password=None): + self.password = password diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_credential_update_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_credential_update_parameters.py new file mode 100644 index 00000000000..202db206170 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_credential_update_parameters.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DataLakeAnalyticsCatalogCredentialUpdateParameters(Model): + """Data Lake Analytics catalog credential update parameters. + + :param password: the current password for the credential and user with + access to the data source. This is required if the requester is not the + account owner. + :type password: str + :param new_password: the new password for the credential and user with + access to the data source. + :type new_password: str + :param uri: the URI identifier for the data source this credential can + connect to in the format : + :type uri: str + :param user_id: the object identifier for the user associated with this + credential with access to the data source. + :type user_id: str + """ + + _attribute_map = { + 'password': {'key': 'password', 'type': 'str'}, + 'new_password': {'key': 'newPassword', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'user_id': {'key': 'userId', 'type': 'str'}, + } + + def __init__(self, password=None, new_password=None, uri=None, user_id=None): + self.password = password + self.new_password = new_password + self.uri = uri + self.user_id = user_id diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_management_client_enums.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_management_client_enums.py new file mode 100644 index 00000000000..ff37fe2c01c --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_management_client_enums.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum + + +class FileType(Enum): + + assembly = "Assembly" + resource = "Resource" + nodeploy = "Nodeploy" diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_secret_create_or_update_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_secret_create_or_update_parameters.py new file mode 100644 index 00000000000..2be2b55ec3f --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/data_lake_analytics_catalog_secret_create_or_update_parameters.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters(Model): + """Data Lake Analytics catalog secret creation and update parameters. This is + deprecated and will be removed in the next release. Please use + DataLakeAnalyticsCatalogCredentialCreateOrUpdateParameters instead. + + :param password: the password for the secret to pass in + :type password: str + :param uri: the URI identifier for the secret in the format + : + :type uri: str + """ + + _validation = { + 'password': {'required': True}, + } + + _attribute_map = { + 'password': {'key': 'password', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + } + + def __init__(self, password, uri=None): + self.password = password + self.uri = uri diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/ddl_name.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/ddl_name.py new file mode 100644 index 00000000000..9791ea31c7c --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/ddl_name.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DdlName(Model): + """A Data Lake Analytics DDL name item. + + :param first_part: the name of the table associated with this database and + schema. + :type first_part: str + :param second_part: the name of the table associated with this database + and schema. + :type second_part: str + :param third_part: the name of the table associated with this database and + schema. 
+ :type third_part: str + :param server: the name of the table associated with this database and + schema. + :type server: str + """ + + _attribute_map = { + 'first_part': {'key': 'firstPart', 'type': 'str'}, + 'second_part': {'key': 'secondPart', 'type': 'str'}, + 'third_part': {'key': 'thirdPart', 'type': 'str'}, + 'server': {'key': 'server', 'type': 'str'}, + } + + def __init__(self, first_part=None, second_part=None, third_part=None, server=None): + self.first_part = first_part + self.second_part = second_part + self.third_part = third_part + self.server = server diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/entity_id.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/entity_id.py new file mode 100644 index 00000000000..bed40f424cd --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/entity_id.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityId(Model): + """A Data Lake Analytics catalog entity identifier object. + + :param name: the name of the external table associated with this database, + schema and table. + :type name: :class:`DdlName + ` + :param version: the version of the external data source. + :type version: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'DdlName'}, + 'version': {'key': 'version', 'type': 'str'}, + } + + def __init__(self, name=None, version=None): + self.name = name + self.version = version diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/external_table.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/external_table.py new file mode 100644 index 00000000000..b7443ffc35f --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/external_table.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExternalTable(Model): + """A Data Lake Analytics catalog external table item. + + :param table_name: the name of the table associated with this database and + schema. + :type table_name: str + :param data_source: the data source associated with this external table. 
+ :type data_source: :class:`EntityId + ` + """ + + _attribute_map = { + 'table_name': {'key': 'tableName', 'type': 'str'}, + 'data_source': {'key': 'dataSource', 'type': 'EntityId'}, + } + + def __init__(self, table_name=None, data_source=None): + self.table_name = table_name + self.data_source = data_source diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/type_field_info.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/type_field_info.py new file mode 100644 index 00000000000..3f0b5c854bc --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/type_field_info.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TypeFieldInfo(Model): + """A Data Lake Analytics catalog type field information item. + + :param name: the name of the field associated with this type. + :type name: str + :param type: the type of the field. + :type type: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, name=None, type=None): + self.name = name + self.type = type diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly.py new file mode 100644 index 00000000000..ddca1d8d597 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlAssembly(CatalogItem): + """A Data Lake Analytics catalog U-SQL Assembly. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param name: the name of the assembly. + :type name: str + :param clr_name: the name of the CLR. + :type clr_name: str + :param is_visible: the switch indicating if this assembly is visible or + not. + :type is_visible: bool + :param is_user_defined: the switch indicating if this assembly is user + defined or not. 
+ :type is_user_defined: bool + :param files: the list of files associated with the assembly + :type files: list of :class:`USqlAssemblyFileInfo + ` + :param dependencies: the list of dependencies associated with the assembly + :type dependencies: list of :class:`USqlAssemblyDependencyInfo + ` + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'name': {'key': 'assemblyName', 'type': 'str'}, + 'clr_name': {'key': 'clrName', 'type': 'str'}, + 'is_visible': {'key': 'isVisible', 'type': 'bool'}, + 'is_user_defined': {'key': 'isUserDefined', 'type': 'bool'}, + 'files': {'key': 'files', 'type': '[USqlAssemblyFileInfo]'}, + 'dependencies': {'key': 'dependencies', 'type': '[USqlAssemblyDependencyInfo]'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, name=None, clr_name=None, is_visible=None, is_user_defined=None, files=None, dependencies=None): + super(USqlAssembly, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.name = name + self.clr_name = clr_name + self.is_visible = is_visible + self.is_user_defined = is_user_defined + self.files = files + self.dependencies = dependencies diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_clr.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_clr.py new file mode 100644 index 00000000000..ab4c6bb9567 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_clr.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlAssemblyClr(CatalogItem): + """A Data Lake Analytics catalog U-SQL assembly CLR item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param name: the name of the assembly. + :type name: str + :param clr_name: the name of the CLR. 
+ :type clr_name: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'name': {'key': 'assemblyClrName', 'type': 'str'}, + 'clr_name': {'key': 'clrName', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, name=None, clr_name=None): + super(USqlAssemblyClr, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.name = name + self.clr_name = clr_name diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_clr_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_clr_paged.py new file mode 100644 index 00000000000..caae702d9f0 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_clr_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlAssemblyClrPaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlAssemblyClr ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlAssemblyClr]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlAssemblyClrPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_dependency_info.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_dependency_info.py new file mode 100644 index 00000000000..5517879e36a --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_dependency_info.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class USqlAssemblyDependencyInfo(Model): + """A Data Lake Analytics catalog U-SQL dependency information item. + + :param entity_id: the EntityId of the dependency. 
+ :type entity_id: :class:`EntityId + ` + """ + + _attribute_map = { + 'entity_id': {'key': 'entityId', 'type': 'EntityId'}, + } + + def __init__(self, entity_id=None): + self.entity_id = entity_id diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_file_info.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_file_info.py new file mode 100644 index 00000000000..39c9f0f1f74 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_assembly_file_info.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class USqlAssemblyFileInfo(Model): + """A Data Lake Analytics catalog U-SQL assembly file information item. + + :param type: the assembly file type. Possible values include: 'Assembly', + 'Resource', 'Nodeploy' + :type type: str or :class:`FileType + ` + :param original_path: the the original path to the assembly file. + :type original_path: str + :param content_path: the the content path to the assembly file. + :type content_path: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'original_path': {'key': 'originalPath', 'type': 'str'}, + 'content_path': {'key': 'contentPath', 'type': 'str'}, + } + + def __init__(self, type=None, original_path=None, content_path=None): + self.type = type + self.original_path = original_path + self.content_path = content_path diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_credential.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_credential.py new file mode 100644 index 00000000000..292d9c9d7ab --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_credential.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlCredential(CatalogItem): + """A Data Lake Analytics catalog U-SQL credential item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param name: the name of the credential. 
+ :type name: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'name': {'key': 'credentialName', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None, name=None): + super(USqlCredential, self).__init__(compute_account_name=compute_account_name, version=version) + self.name = name diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_credential_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_credential_paged.py new file mode 100644 index 00000000000..fd7b50a9ae6 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_credential_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlCredentialPaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlCredential ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlCredential]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlCredentialPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_database.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_database.py new file mode 100644 index 00000000000..ff1657b30b3 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_database.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlDatabase(CatalogItem): + """A Data Lake Analytics catalog U-SQL database item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param name: the name of the database. 
+ :type name: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'name': {'key': 'databaseName', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None, name=None): + super(USqlDatabase, self).__init__(compute_account_name=compute_account_name, version=version) + self.name = name diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_database_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_database_paged.py new file mode 100644 index 00000000000..acebb850068 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_database_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlDatabasePaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlDatabase ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlDatabase]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlDatabasePaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_directed_column.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_directed_column.py new file mode 100644 index 00000000000..2e271cbeca1 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_directed_column.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class USqlDirectedColumn(Model): + """A Data Lake Analytics catalog U-SQL directed column item. + + :param name: the name of the index in the table. + :type name: str + :param descending: the switch indicating if the index is descending or + not. 
+ :type descending: bool + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'descending': {'key': 'descending', 'type': 'bool'}, + } + + def __init__(self, name=None, descending=None): + self.name = name + self.descending = descending diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_distribution_info.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_distribution_info.py new file mode 100644 index 00000000000..ffc0f7a5794 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_distribution_info.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class USqlDistributionInfo(Model): + """A Data Lake Analytics catalog U-SQL distribution information object. + + :param type: the type of this distribution. + :type type: int + :param keys: the list of directed columns in the distribution + :type keys: list of :class:`USqlDirectedColumn + ` + :param count: the count of indices using this distribution. + :type count: int + :param dynamic_count: the dynamic count of indices using this + distribution. + :type dynamic_count: int + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'int'}, + 'keys': {'key': 'keys', 'type': '[USqlDirectedColumn]'}, + 'count': {'key': 'count', 'type': 'int'}, + 'dynamic_count': {'key': 'dynamicCount', 'type': 'int'}, + } + + def __init__(self, type=None, keys=None, count=None, dynamic_count=None): + self.type = type + self.keys = keys + self.count = count + self.dynamic_count = dynamic_count diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_external_data_source.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_external_data_source.py new file mode 100644 index 00000000000..56f1934350c --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_external_data_source.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlExternalDataSource(CatalogItem): + """A Data Lake Analytics catalog U-SQL external datasource item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. 
+ :type database_name: str + :param name: the name of the external data source. + :type name: str + :param provider: the name of the provider for the external data source. + :type provider: str + :param provider_string: the name of the provider string for the external + data source. + :type provider_string: str + :param pushdown_types: the list of types to push down from the external + data source. + :type pushdown_types: list of str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'name': {'key': 'externalDataSourceName', 'type': 'str'}, + 'provider': {'key': 'provider', 'type': 'str'}, + 'provider_string': {'key': 'providerString', 'type': 'str'}, + 'pushdown_types': {'key': 'pushdownTypes', 'type': '[str]'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, name=None, provider=None, provider_string=None, pushdown_types=None): + super(USqlExternalDataSource, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.name = name + self.provider = provider + self.provider_string = provider_string + self.pushdown_types = pushdown_types diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_external_data_source_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_external_data_source_paged.py new file mode 100644 index 00000000000..fc5cf4ae6b1 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_external_data_source_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlExternalDataSourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlExternalDataSource ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlExternalDataSource]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlExternalDataSourcePaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_index.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_index.py new file mode 100644 index 00000000000..f57608b7246 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_index.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class USqlIndex(Model): + """A Data Lake Analytics catalog U-SQL table index item. + + :param name: the name of the index in the table. + :type name: str + :param index_keys: the list of directed columns in the index + :type index_keys: list of :class:`USqlDirectedColumn + ` + :param columns: the list of columns in the index + :type columns: list of str + :param distribution_info: the distributions info of the index + :type distribution_info: :class:`USqlDistributionInfo + ` + :param partition_function: partition function ID for the index. + :type partition_function: str + :param partition_key_list: the list of partion keys in the index + :type partition_key_list: list of str + :param stream_names: the list of full paths to the streams that contain + this index in the DataLake account. + :type stream_names: list of str + :param is_columnstore: the switch indicating if this index is a + columnstore index. + :type is_columnstore: bool + :param index_id: the ID of this index within the table. + :type index_id: int + :param is_unique: the switch indicating if this index is a unique index. + :type is_unique: bool + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'index_keys': {'key': 'indexKeys', 'type': '[USqlDirectedColumn]'}, + 'columns': {'key': 'columns', 'type': '[str]'}, + 'distribution_info': {'key': 'distributionInfo', 'type': 'USqlDistributionInfo'}, + 'partition_function': {'key': 'partitionFunction', 'type': 'str'}, + 'partition_key_list': {'key': 'partitionKeyList', 'type': '[str]'}, + 'stream_names': {'key': 'streamNames', 'type': '[str]'}, + 'is_columnstore': {'key': 'isColumnstore', 'type': 'bool'}, + 'index_id': {'key': 'indexId', 'type': 'int'}, + 'is_unique': {'key': 'isUnique', 'type': 'bool'}, + } + + def __init__(self, name=None, index_keys=None, columns=None, distribution_info=None, partition_function=None, partition_key_list=None, stream_names=None, is_columnstore=None, index_id=None, is_unique=None): + self.name = name + self.index_keys = index_keys + self.columns = columns + self.distribution_info = distribution_info + self.partition_function = partition_function + self.partition_key_list = partition_key_list + self.stream_names = stream_names + self.is_columnstore = is_columnstore + self.index_id = index_id + self.is_unique = is_unique diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_package.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_package.py new file mode 100644 index 00000000000..bcb62ae92f1 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_package.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlPackage(CatalogItem): + """A Data Lake Analytics catalog U-SQL package item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database containing the package. + :type database_name: str + :param schema_name: the name of the schema associated with this package + and database. + :type schema_name: str + :param name: the name of the package. + :type name: str + :param definition: the definition of the package. + :type definition: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'schema_name': {'key': 'schemaName', 'type': 'str'}, + 'name': {'key': 'packageName', 'type': 'str'}, + 'definition': {'key': 'definition', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, schema_name=None, name=None, definition=None): + super(USqlPackage, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.schema_name = schema_name + self.name = name + self.definition = definition diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_package_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_package_paged.py new file mode 100644 index 00000000000..2d8c3977a4f --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_package_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlPackagePaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlPackage ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlPackage]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlPackagePaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_procedure.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_procedure.py new file mode 100644 index 00000000000..94123ace034 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_procedure.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlProcedure(CatalogItem): + """A Data Lake Analytics catalog U-SQL procedure item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param schema_name: the name of the schema associated with this procedure + and database. + :type schema_name: str + :param name: the name of the procedure. + :type name: str + :param definition: the defined query of the procedure. + :type definition: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'schema_name': {'key': 'schemaName', 'type': 'str'}, + 'name': {'key': 'procName', 'type': 'str'}, + 'definition': {'key': 'definition', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, schema_name=None, name=None, definition=None): + super(USqlProcedure, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.schema_name = schema_name + self.name = name + self.definition = definition diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_procedure_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_procedure_paged.py new file mode 100644 index 00000000000..61b23707c7e --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_procedure_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlProcedurePaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlProcedure ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlProcedure]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlProcedurePaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_schema.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_schema.py new file mode 100644 index 00000000000..a9a2d9f40b5 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_schema.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlSchema(CatalogItem): + """A Data Lake Analytics catalog U-SQL schema item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param name: the name of the schema. + :type name: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'name': {'key': 'schemaName', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, name=None): + super(USqlSchema, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.name = name diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_schema_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_schema_paged.py new file mode 100644 index 00000000000..f8f8d238619 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_schema_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlSchemaPaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlSchema ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlSchema]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlSchemaPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_secret.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_secret.py new file mode 100644 index 00000000000..17113d2b7b7 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_secret.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlSecret(CatalogItem): + """A Data Lake Analytics catalog U-SQL secret item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param name: the name of the secret. + :type name: str + :param creation_time: the creation time of the credential object. This is + the only information returned about a secret from a GET. 
+ :type creation_time: datetime + :param uri: the URI identifier for the secret in the format + : + :type uri: str + :param password: the password for the secret to pass in + :type password: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'name': {'key': 'secretName', 'type': 'str'}, + 'creation_time': {'key': 'creationTime', 'type': 'iso-8601'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, name=None, creation_time=None, uri=None, password=None): + super(USqlSecret, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.name = name + self.creation_time = creation_time + self.uri = uri + self.password = password diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table.py new file mode 100644 index 00000000000..6a0c0bde871 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlTable(CatalogItem): + """A Data Lake Analytics catalog U-SQL table item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param schema_name: the name of the schema associated with this table and + database. + :type schema_name: str + :param name: the name of the table. + :type name: str + :param column_list: the list of columns in this table + :type column_list: list of :class:`USqlTableColumn + ` + :param index_list: the list of indices in this table + :type index_list: list of :class:`USqlIndex + ` + :param partition_key_list: the list of partition keys in the table + :type partition_key_list: list of str + :param external_table: the external table associated with the table. 
+ :type external_table: :class:`ExternalTable + ` + :param distribution_info: the distributions info of the table + :type distribution_info: :class:`USqlDistributionInfo + ` + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'schema_name': {'key': 'schemaName', 'type': 'str'}, + 'name': {'key': 'tableName', 'type': 'str'}, + 'column_list': {'key': 'columnList', 'type': '[USqlTableColumn]'}, + 'index_list': {'key': 'indexList', 'type': '[USqlIndex]'}, + 'partition_key_list': {'key': 'partitionKeyList', 'type': '[str]'}, + 'external_table': {'key': 'externalTable', 'type': 'ExternalTable'}, + 'distribution_info': {'key': 'distributionInfo', 'type': 'USqlDistributionInfo'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, schema_name=None, name=None, column_list=None, index_list=None, partition_key_list=None, external_table=None, distribution_info=None): + super(USqlTable, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.schema_name = schema_name + self.name = name + self.column_list = column_list + self.index_list = index_list + self.partition_key_list = partition_key_list + self.external_table = external_table + self.distribution_info = distribution_info diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_column.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_column.py new file mode 100644 index 00000000000..0eed3e31817 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_column.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class USqlTableColumn(Model): + """A Data Lake Analytics catalog U-SQL table column item. + + :param name: the name of the column in the table. + :type name: str + :param type: the object type of the specified column (such as + System.String). + :type type: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, name=None, type=None): + self.name = name + self.type = type diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_paged.py new file mode 100644 index 00000000000..b96cd3580b1 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlTablePaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlTable ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlTable]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlTablePaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_partition.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_partition.py new file mode 100644 index 00000000000..82c554ffcf3 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_partition.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlTablePartition(CatalogItem): + """A Data Lake Analytics catalog U-SQL table partition item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param schema_name: the name of the schema associated with this table + partition and database. + :type schema_name: str + :param name: the name of the table partition. + :type name: str + :param parent_name: the Ddl object of the partition's parent. + :type parent_name: :class:`DdlName + ` + :param index_id: the index ID for this partition. + :type index_id: int + :param label: the list of labels associated with this partition. 
+ :type label: list of str + :param create_date: the creation time of the partition + :type create_date: datetime + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'schema_name': {'key': 'schemaName', 'type': 'str'}, + 'name': {'key': 'partitionName', 'type': 'str'}, + 'parent_name': {'key': 'parentName', 'type': 'DdlName'}, + 'index_id': {'key': 'indexId', 'type': 'int'}, + 'label': {'key': 'label', 'type': '[str]'}, + 'create_date': {'key': 'createDate', 'type': 'iso-8601'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, schema_name=None, name=None, parent_name=None, index_id=None, label=None, create_date=None): + super(USqlTablePartition, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.schema_name = schema_name + self.name = name + self.parent_name = parent_name + self.index_id = index_id + self.label = label + self.create_date = create_date diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_partition_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_partition_paged.py new file mode 100644 index 00000000000..2b0f1db6ac4 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_partition_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlTablePartitionPaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlTablePartition ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlTablePartition]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlTablePartitionPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_statistics.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_statistics.py new file mode 100644 index 00000000000..d25e5f78450 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_statistics.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlTableStatistics(CatalogItem): + """A Data Lake Analytics catalog U-SQL table statistics item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param schema_name: the name of the schema associated with this table and + database. + :type schema_name: str + :param table_name: the name of the table. + :type table_name: str + :param name: the name of the table statistics. + :type name: str + :param user_stat_name: the name of the user statistics. + :type user_stat_name: str + :param stat_data_path: the path to the statistics data. + :type stat_data_path: str + :param create_time: the creation time of the statistics. + :type create_time: datetime + :param update_time: the last time the statistics were updated. + :type update_time: datetime + :param is_user_created: the switch indicating if these statistics are user + created. + :type is_user_created: bool + :param is_auto_created: the switch indicating if these statistics are + automatically created. + :type is_auto_created: bool + :param has_filter: the switch indicating if these statistics have a + filter. + :type has_filter: bool + :param filter_definition: the filter definition for the statistics. + :type filter_definition: str + :param col_names: the list of column names associated with these + statistics. + :type col_names: list of str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'schema_name': {'key': 'schemaName', 'type': 'str'}, + 'table_name': {'key': 'tableName', 'type': 'str'}, + 'name': {'key': 'statisticsName', 'type': 'str'}, + 'user_stat_name': {'key': 'userStatName', 'type': 'str'}, + 'stat_data_path': {'key': 'statDataPath', 'type': 'str'}, + 'create_time': {'key': 'createTime', 'type': 'iso-8601'}, + 'update_time': {'key': 'updateTime', 'type': 'iso-8601'}, + 'is_user_created': {'key': 'isUserCreated', 'type': 'bool'}, + 'is_auto_created': {'key': 'isAutoCreated', 'type': 'bool'}, + 'has_filter': {'key': 'hasFilter', 'type': 'bool'}, + 'filter_definition': {'key': 'filterDefinition', 'type': 'str'}, + 'col_names': {'key': 'colNames', 'type': '[str]'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, schema_name=None, table_name=None, name=None, user_stat_name=None, stat_data_path=None, create_time=None, update_time=None, is_user_created=None, is_auto_created=None, has_filter=None, filter_definition=None, col_names=None): + super(USqlTableStatistics, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.schema_name = schema_name + self.table_name = table_name + self.name = name + self.user_stat_name = user_stat_name + self.stat_data_path = stat_data_path + self.create_time = create_time + self.update_time = update_time + self.is_user_created = is_user_created + self.is_auto_created = is_auto_created + self.has_filter = has_filter + self.filter_definition = filter_definition + self.col_names = col_names diff --git 
a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_statistics_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_statistics_paged.py new file mode 100644 index 00000000000..47fa3fd26bc --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_statistics_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlTableStatisticsPaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlTableStatistics ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlTableStatistics]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlTableStatisticsPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_type.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_type.py new file mode 100644 index 00000000000..d2a473fb05e --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_type.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .usql_type import USqlType + + +class USqlTableType(USqlType): + """A Data Lake Analytics catalog U-SQL table type item. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param schema_name: the name of the schema associated with this table and + database. + :type schema_name: str + :param name: the name of type for this type. + :type name: str + :param type_family: the type family for this type. + :type type_family: str + :param c_sharp_name: the C# name for this type. + :type c_sharp_name: str + :param full_csharp_name: the fully qualified C# name for this type. + :type full_csharp_name: str + :param system_type_id: the system type ID for this type. + :type system_type_id: int + :param user_type_id: the user type ID for this type. + :type user_type_id: int + :param schema_id: the schema ID for this type. 
+ :type schema_id: int + :param principal_id: the principal ID for this type. + :type principal_id: int + :param is_nullable: the the switch indicating if this type is nullable. + :type is_nullable: bool + :param is_user_defined: the the switch indicating if this type is user + defined. + :type is_user_defined: bool + :param is_assembly_type: the the switch indicating if this type is an + assembly type. + :type is_assembly_type: bool + :param is_table_type: the the switch indicating if this type is a table + type. + :type is_table_type: bool + :param is_complex_type: the the switch indicating if this type is a + complex type. + :type is_complex_type: bool + :ivar columns: the type field information associated with this table type. + :vartype columns: list of :class:`TypeFieldInfo + ` + """ + + _validation = { + 'columns': {'readonly': True}, + } + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'schema_name': {'key': 'schemaName', 'type': 'str'}, + 'name': {'key': 'typeName', 'type': 'str'}, + 'type_family': {'key': 'typeFamily', 'type': 'str'}, + 'c_sharp_name': {'key': 'cSharpName', 'type': 'str'}, + 'full_csharp_name': {'key': 'fullCSharpName', 'type': 'str'}, + 'system_type_id': {'key': 'systemTypeId', 'type': 'int'}, + 'user_type_id': {'key': 'userTypeId', 'type': 'int'}, + 'schema_id': {'key': 'schemaId', 'type': 'int'}, + 'principal_id': {'key': 'principalId', 'type': 'int'}, + 'is_nullable': {'key': 'isNullable', 'type': 'bool'}, + 'is_user_defined': {'key': 'isUserDefined', 'type': 'bool'}, + 'is_assembly_type': {'key': 'isAssemblyType', 'type': 'bool'}, + 'is_table_type': {'key': 'isTableType', 'type': 'bool'}, + 'is_complex_type': {'key': 'isComplexType', 'type': 'bool'}, + 'columns': {'key': 'columns', 'type': '[TypeFieldInfo]'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, schema_name=None, name=None, type_family=None, c_sharp_name=None, full_csharp_name=None, system_type_id=None, user_type_id=None, schema_id=None, principal_id=None, is_nullable=None, is_user_defined=None, is_assembly_type=None, is_table_type=None, is_complex_type=None): + super(USqlTableType, self).__init__(compute_account_name=compute_account_name, version=version, database_name=database_name, schema_name=schema_name, name=name, type_family=type_family, c_sharp_name=c_sharp_name, full_csharp_name=full_csharp_name, system_type_id=system_type_id, user_type_id=user_type_id, schema_id=schema_id, principal_id=principal_id, is_nullable=is_nullable, is_user_defined=is_user_defined, is_assembly_type=is_assembly_type, is_table_type=is_table_type, is_complex_type=is_complex_type) + self.columns = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_type_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_type_paged.py new file mode 100644 index 00000000000..9cda3f159c8 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_type_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlTableTypePaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlTableType ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlTableType]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlTableTypePaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_valued_function.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_valued_function.py new file mode 100644 index 00000000000..70d2c94a50b --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_valued_function.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlTableValuedFunction(CatalogItem): + """A Data Lake Analytics catalog U-SQL table valued function item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param schema_name: the name of the schema associated with this database. + :type schema_name: str + :param name: the name of the table valued function. + :type name: str + :param definition: the definition of the table valued function. 
+ :type definition: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'schema_name': {'key': 'schemaName', 'type': 'str'}, + 'name': {'key': 'tvfName', 'type': 'str'}, + 'definition': {'key': 'definition', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, schema_name=None, name=None, definition=None): + super(USqlTableValuedFunction, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.schema_name = schema_name + self.name = name + self.definition = definition diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_valued_function_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_valued_function_paged.py new file mode 100644 index 00000000000..36065b69b52 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_table_valued_function_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlTableValuedFunctionPaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlTableValuedFunction ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlTableValuedFunction]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlTableValuedFunctionPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_type.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_type.py new file mode 100644 index 00000000000..680abeb5447 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_type.py @@ -0,0 +1,95 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlType(CatalogItem): + """A Data Lake Analytics catalog U-SQL type item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. 
+ :type database_name: str + :param schema_name: the name of the schema associated with this table and + database. + :type schema_name: str + :param name: the name of type for this type. + :type name: str + :param type_family: the type family for this type. + :type type_family: str + :param c_sharp_name: the C# name for this type. + :type c_sharp_name: str + :param full_csharp_name: the fully qualified C# name for this type. + :type full_csharp_name: str + :param system_type_id: the system type ID for this type. + :type system_type_id: int + :param user_type_id: the user type ID for this type. + :type user_type_id: int + :param schema_id: the schema ID for this type. + :type schema_id: int + :param principal_id: the principal ID for this type. + :type principal_id: int + :param is_nullable: the the switch indicating if this type is nullable. + :type is_nullable: bool + :param is_user_defined: the the switch indicating if this type is user + defined. + :type is_user_defined: bool + :param is_assembly_type: the the switch indicating if this type is an + assembly type. + :type is_assembly_type: bool + :param is_table_type: the the switch indicating if this type is a table + type. + :type is_table_type: bool + :param is_complex_type: the the switch indicating if this type is a + complex type. + :type is_complex_type: bool + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'schema_name': {'key': 'schemaName', 'type': 'str'}, + 'name': {'key': 'typeName', 'type': 'str'}, + 'type_family': {'key': 'typeFamily', 'type': 'str'}, + 'c_sharp_name': {'key': 'cSharpName', 'type': 'str'}, + 'full_csharp_name': {'key': 'fullCSharpName', 'type': 'str'}, + 'system_type_id': {'key': 'systemTypeId', 'type': 'int'}, + 'user_type_id': {'key': 'userTypeId', 'type': 'int'}, + 'schema_id': {'key': 'schemaId', 'type': 'int'}, + 'principal_id': {'key': 'principalId', 'type': 'int'}, + 'is_nullable': {'key': 'isNullable', 'type': 'bool'}, + 'is_user_defined': {'key': 'isUserDefined', 'type': 'bool'}, + 'is_assembly_type': {'key': 'isAssemblyType', 'type': 'bool'}, + 'is_table_type': {'key': 'isTableType', 'type': 'bool'}, + 'is_complex_type': {'key': 'isComplexType', 'type': 'bool'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, schema_name=None, name=None, type_family=None, c_sharp_name=None, full_csharp_name=None, system_type_id=None, user_type_id=None, schema_id=None, principal_id=None, is_nullable=None, is_user_defined=None, is_assembly_type=None, is_table_type=None, is_complex_type=None): + super(USqlType, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.schema_name = schema_name + self.name = name + self.type_family = type_family + self.c_sharp_name = c_sharp_name + self.full_csharp_name = full_csharp_name + self.system_type_id = system_type_id + self.user_type_id = user_type_id + self.schema_id = schema_id + self.principal_id = principal_id + self.is_nullable = is_nullable + self.is_user_defined = is_user_defined + self.is_assembly_type = is_assembly_type + self.is_table_type = is_table_type + self.is_complex_type = is_complex_type diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_type_paged.py 
b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_type_paged.py new file mode 100644 index 00000000000..f079ab67436 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_type_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlTypePaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlType ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlType]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlTypePaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_view.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_view.py new file mode 100644 index 00000000000..54a30de755c --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_view.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .catalog_item import CatalogItem + + +class USqlView(CatalogItem): + """A Data Lake Analytics catalog U-SQL view item. + + :param compute_account_name: the name of the Data Lake Analytics account. + :type compute_account_name: str + :param version: the version of the catalog item. + :type version: str + :param database_name: the name of the database. + :type database_name: str + :param schema_name: the name of the schema associated with this view and + database. + :type schema_name: str + :param name: the name of the view. + :type name: str + :param definition: the defined query of the view. 
+ :type definition: str + """ + + _attribute_map = { + 'compute_account_name': {'key': 'computeAccountName', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'schema_name': {'key': 'schemaName', 'type': 'str'}, + 'name': {'key': 'viewName', 'type': 'str'}, + 'definition': {'key': 'definition', 'type': 'str'}, + } + + def __init__(self, compute_account_name=None, version=None, database_name=None, schema_name=None, name=None, definition=None): + super(USqlView, self).__init__(compute_account_name=compute_account_name, version=version) + self.database_name = database_name + self.schema_name = schema_name + self.name = name + self.definition = definition diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_view_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_view_paged.py new file mode 100644 index 00000000000..5536566708a --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/models/usql_view_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class USqlViewPaged(Paged): + """ + A paging container for iterating over a list of :class:`USqlView ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[USqlView]'} + } + + def __init__(self, *args, **kwargs): + + super(USqlViewPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/operations/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/operations/__init__.py new file mode 100644 index 00000000000..da1ff05e429 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/operations/__init__.py @@ -0,0 +1,16 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
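These generated catalog models expose snake_case attributes, and each _attribute_map entry records the JSON key the attribute is serialized to (for USqlView, name maps to viewName); the companion *Paged classes are msrest.paging.Paged iterators over the same models. A minimal, illustrative sketch of reading one of these models back through the catalog operations defined further down; the get_view call mirrors the upstream azure-mgmt-datalake-analytics package and does not appear in this hunk, so treat it as an assumption, and all names are placeholders.

def describe_view(catalog_ops):
    # catalog_ops: assumed, already-configured CatalogOperations instance.
    view = catalog_ops.get_view('myadla', 'mydb', 'dbo', 'DailySummary')  # assumed method
    # Snake_case attributes, serialized as databaseName / schemaName / viewName / definition.
    return view.database_name, view.schema_name, view.name, view.definition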
+# -------------------------------------------------------------------------- + + from .catalog_operations import CatalogOperations + + __all__ = [ + 'CatalogOperations', + ] diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/operations/catalog_operations.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/operations/catalog_operations.py new file mode 100644 index 00000000000..f4b47017681 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/operations/catalog_operations.py @@ -0,0 +1,3588 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class CatalogOperations(object): + """CatalogOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: Client Api Version. Constant value: "2016-11-01". + """ + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2016-11-01" + + self.config = config + + def create_secret( + self, account_name, database_name, secret_name, password, uri=None, custom_headers=None, raw=False, **operation_config): + """Creates the specified secret for use with external data sources in the + specified database. This is deprecated and will be removed in the next + release. Please use CreateCredential instead. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database in which to create the + secret. + :type database_name: str + :param secret_name: The name of the secret. + :type secret_name: str + :param password: the password for the secret to pass in + :type password: str + :param uri: the URI identifier for the secret in the format + : + :type uri: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + parameters = models.DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters(password=password, uri=uri) + + # Construct URL + url = '/catalog/usql/databases/{databaseName}/secrets/{secretName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'secretName': self._serialize.url("secret_name", secret_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters') + + # Construct and send request + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def update_secret( + self, account_name, database_name, secret_name, password, uri=None, custom_headers=None, raw=False, **operation_config): + """Modifies the specified secret for use with external data sources in the + specified database. This is deprecated and will be removed in the next + release. Please use UpdateCredential instead. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the secret. + :type database_name: str + :param secret_name: The name of the secret. + :type secret_name: str + :param password: the password for the secret to pass in + :type password: str + :param uri: the URI identifier for the secret in the format + : + :type uri: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
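Every operation in this class accepts the same trailing custom_headers / raw keywords; with raw=True the call returns an msrest.pipeline.ClientRawResponse wrapping the HTTP response instead of a plain deserialized value (or None). A minimal sketch of that pattern for create_secret, assuming an already-configured CatalogOperations instance is passed in; account, database, secret name and URI are placeholders.

def create_secret_raw(catalog_ops):
    # catalog_ops: assumed CatalogOperations instance from a configured catalog client.
    raw_resp = catalog_ops.create_secret(
        'myadla', 'mydb', 'mysecret',
        password='p@ssw0rd!',             # required secret password
        uri='myserver.example.com:1433',  # optional URI identifier for the secret
        raw=True)                         # ask for the ClientRawResponse wrapper
    # ClientRawResponse exposes the underlying HTTP response; 200 is the only
    # status code the generated body treats as success.
    return raw_resp.response.status_code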
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + parameters = models.DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters(password=password, uri=uri) + + # Construct URL + url = '/catalog/usql/databases/{databaseName}/secrets/{secretName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'secretName': self._serialize.url("secret_name", secret_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters') + + # Construct and send request + request = self._client.patch(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def delete_secret( + self, account_name, database_name, secret_name, custom_headers=None, raw=False, **operation_config): + """Deletes the specified secret in the specified database. This is + deprecated and will be removed in the next release. Please use + DeleteCredential instead. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the secret. + :type database_name: str + :param secret_name: The name of the secret to delete + :type secret_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/secrets/{secretName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'secretName': self._serialize.url("secret_name", secret_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def get_secret( + self, account_name, database_name, secret_name, custom_headers=None, raw=False, **operation_config): + """Gets the specified secret in the specified database. This is deprecated + and will be removed in the next release. Please use GetCredential + instead. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the secret. + :type database_name: str + :param secret_name: The name of the secret to get + :type secret_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`USqlSecret + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlSecret + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/secrets/{secretName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'secretName': self._serialize.url("secret_name", secret_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlSecret', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def delete_all_secrets( + self, account_name, database_name, custom_headers=None, raw=False, **operation_config): + """Deletes all secrets in the specified database. This is deprecated and + will be removed in the next release. In the future, please only drop + individual credentials using DeleteCredential. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the secret. + :type database_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
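Taken together, create_secret, get_secret, update_secret and delete_secret form the deprecated secret lifecycle that these docstrings point away from. A short end-to-end sketch under the same assumption of a configured CatalogOperations instance, with placeholder names:

def secret_lifecycle(catalog_ops):
    # Deprecated path: the docstrings above recommend the credential operations instead.
    catalog_ops.create_secret('myadla', 'mydb', 'mysecret', password='p@ssw0rd!')
    secret = catalog_ops.get_secret('myadla', 'mydb', 'mysecret')   # deserialized USqlSecret
    catalog_ops.update_secret('myadla', 'mydb', 'mysecret', password='n3w-p@ss!')
    catalog_ops.delete_secret('myadla', 'mydb', 'mysecret')
    return secret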
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/secrets' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def create_credential( + self, account_name, database_name, credential_name, parameters, custom_headers=None, raw=False, **operation_config): + """Creates the specified credential for use with external data sources in + the specified database. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database in which to create the + credential. Note: This is NOT an external database name, but the name + of an existing U-SQL database that should contain the new credential + object. + :type database_name: str + :param credential_name: The name of the credential. + :type credential_name: str + :param parameters: The parameters required to create the credential + (name and password) + :type parameters: + :class:`DataLakeAnalyticsCatalogCredentialCreateParameters + ` + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/credentials/{credentialName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'credentialName': self._serialize.url("credential_name", credential_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsCatalogCredentialCreateParameters') + + # Construct and send request + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def update_credential( + self, account_name, database_name, credential_name, parameters, custom_headers=None, raw=False, **operation_config): + """Modifies the specified credential for use with external data sources in + the specified database. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + credential. + :type database_name: str + :param credential_name: The name of the credential. + :type credential_name: str + :param parameters: The parameters required to modify the credential + (name and password) + :type parameters: + :class:`DataLakeAnalyticsCatalogCredentialUpdateParameters + ` + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/credentials/{credentialName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'credentialName': self._serialize.url("credential_name", credential_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsCatalogCredentialUpdateParameters') + + # Construct and send request + request = self._client.patch(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def delete_credential( + self, account_name, database_name, credential_name, cascade=False, password=None, custom_headers=None, raw=False, **operation_config): + """Deletes the specified credential in the specified database. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + credential. + :type database_name: str + :param credential_name: The name of the credential to delete + :type credential_name: str + :param cascade: Indicates if the delete should be a cascading delete + (which deletes all resources dependent on the credential as well as + the credential) or not. If false will fail if there are any resources + relying on the credential. + :type cascade: bool + :param password: the current password for the credential and user with + access to the data source. This is required if the requester is not + the account owner. + :type password: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + parameters = None + if password is not None: + parameters = models.DataLakeAnalyticsCatalogCredentialDeleteParameters(password=password) + + # Construct URL + url = '/catalog/usql/databases/{databaseName}/credentials/{credentialName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'credentialName': self._serialize.url("credential_name", credential_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if cascade is not None: + query_parameters['cascade'] = self._serialize.query("cascade", cascade, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsCatalogCredentialDeleteParameters') + else: + body_content = None + + # Construct and send request + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def get_credential( + self, account_name, database_name, credential_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified credential from the Data Lake Analytics + catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the schema. + :type database_name: str + :param credential_name: The name of the credential. + :type credential_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`USqlCredential + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlCredential + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/credentials/{credentialName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'credentialName': self._serialize.url("credential_name", credential_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlCredential', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_credentials( + self, account_name, database_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of credentials from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the schema. + :type database_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. 
+ :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlCredential + ` + :rtype: :class:`USqlCredentialPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/credentials' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlCredentialPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlCredentialPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_external_data_source( + self, account_name, database_name, external_data_source_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified external data source from the Data Lake + Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the external + data source. + :type database_name: str + :param external_data_source_name: The name of the external data + source. 
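The credential operations are the supported replacement for the secret calls above. A hedged sketch, assuming a configured CatalogOperations instance and a DataLakeAnalyticsCatalogCredentialCreateParameters value built from the model defined elsewhere in this vendored package (not part of this hunk); names are placeholders.

def manage_credentials(catalog_ops, create_params):
    # create_params: assumed DataLakeAnalyticsCatalogCredentialCreateParameters instance.
    catalog_ops.create_credential('myadla', 'mydb', 'mycred', create_params)

    cred = catalog_ops.get_credential('myadla', 'mydb', 'mycred')  # USqlCredential
    # list_credentials returns a lazily-paged iterator of USqlCredential items.
    names = [c.name for c in catalog_ops.list_credentials('myadla', 'mydb', top=10)]

    # Cascading delete also drops objects that depend on the credential; the password
    # is only required when the caller is not the account owner.
    catalog_ops.delete_credential('myadla', 'mydb', 'mycred',
                                  cascade=True, password='p@ssw0rd!')
    return cred, names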
+ :type external_data_source_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlExternalDataSource + ` + or :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlExternalDataSource + ` + or :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/externaldatasources/{externalDataSourceName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'externalDataSourceName': self._serialize.url("external_data_source_name", external_data_source_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlExternalDataSource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_external_data_sources( + self, account_name, database_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of external data sources from the Data Lake + Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the external + data sources. + :type database_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. 
+ :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlExternalDataSource + ` + :rtype: :class:`USqlExternalDataSourcePaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/externaldatasources' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlExternalDataSourcePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlExternalDataSourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_procedure( + self, account_name, database_name, schema_name, procedure_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified procedure from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. 
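All of the list_* operations accept the same optional OData arguments (filter, top, skip, select, orderby, count), which internal_paging serializes into $filter, $top and so on, as the generated bodies above show. An illustrative combination on list_external_data_sources, again with an assumed CatalogOperations instance and placeholder names:

def list_filtered_sources(catalog_ops):
    sources = catalog_ops.list_external_data_sources(
        'myadla', 'mydb',
        filter="startswith(name, 'ext')",  # sent as $filter
        top=5,                             # sent as $top (validated as >= 1)
        orderby='name asc')                # sent as $orderby
    # The paged result fetches follow-up pages lazily while iterating.
    return [src.name for src in sources]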
+ :type account_name: str + :param database_name: The name of the database containing the + procedure. + :type database_name: str + :param schema_name: The name of the schema containing the procedure. + :type schema_name: str + :param procedure_name: The name of the procedure. + :type procedure_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlProcedure + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlProcedure + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/procedures/{procedureName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'procedureName': self._serialize.url("procedure_name", procedure_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlProcedure', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_procedures( + self, account_name, database_name, schema_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of procedures from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + procedures. + :type database_name: str + :param schema_name: The name of the schema containing the procedures. + :type schema_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. 
+ Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlProcedure + ` + :rtype: :class:`USqlProcedurePaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/procedures' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlProcedurePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlProcedurePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_table( + self, account_name, 
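On any status other than 200 these methods raise CloudError with the service's x-ms-request-id attached, as the generated bodies show. A small illustrative guard around get_procedure, assuming the usual configured CatalogOperations instance and placeholder names:

from msrestazure.azure_exceptions import CloudError

def show_procedure(catalog_ops):
    try:
        proc = catalog_ops.get_procedure('myadla', 'mydb', 'dbo', 'usp_UpdateStats')
        return proc.definition            # U-SQL body of the procedure
    except CloudError as err:
        # request_id is copied from the x-ms-request-id response header, as above.
        print('catalog request failed:', err, err.request_id)
        return None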
database_name, schema_name, table_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified table from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the table. + :type database_name: str + :param schema_name: The name of the schema containing the table. + :type schema_name: str + :param table_name: The name of the table. + :type table_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlTable + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlTable + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tables/{tableName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableName': self._serialize.url("table_name", table_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlTable', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_tables( + self, account_name, database_name, schema_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, basic=False, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of tables from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the tables. + :type database_name: str + :param schema_name: The name of the schema containing the tables. + :type schema_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. 
+ :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param basic: The basic switch indicates what level of information to + return when listing tables. When basic is true, only database_name, + schema_name, table_name and version are returned for each table, + otherwise all table metadata is returned. By default, it is false. + Optional. + :type basic: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlTable + ` + :rtype: :class:`USqlTablePaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tables' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + if basic is not None: + query_parameters['basic'] = self._serialize.query("basic", basic, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = 
self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTablePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlTablePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def list_table_statistics_by_database_and_schema( + self, account_name, database_name, schema_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of all table statistics within the specified schema + from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + statistics. + :type database_name: str + :param schema_name: The name of the schema containing the statistics. + :type schema_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
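The list_* operations in this file return msrest-style Paged objects (USqlTablePaged above, USqlTableStatisticsPaged below, and so on) whose pages are fetched lazily through the internal_paging closure. A hedged sketch of consuming one, again assuming `catalog` is the operations instance:

def list_basic_tables(catalog, account_name, database, schema):
    # basic=True asks the service to return only database_name, schema_name,
    # table_name and version for each table, per the docstring above.
    paged = catalog.list_tables(account_name, database, schema, basic=True)
    for table in paged:   # iterating follows next_link page by page
        print(table)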
+ :return: An iterator like instance of :class:`USqlTableStatistics + ` + :rtype: :class:`USqlTableStatisticsPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/statistics' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTableStatisticsPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlTableStatisticsPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_table_type( + self, account_name, database_name, schema_name, table_type_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified table type from the Data Lake Analytics + catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the table + type. + :type database_name: str + :param schema_name: The name of the schema containing the table type. + :type schema_name: str + :param table_type_name: The name of the table type to retrieve. 
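All of the list_* operations here accept the same optional OData query options, which the code above serializes as $filter, $top, $skip, $select, $orderby and $count. A hedged sketch with placeholder values, using the schema-level statistics listing defined above:

def first_statistics(catalog, account_name, database, schema):
    # Only options that are not None are added to the query string; $top and
    # $skip are validated as integers >= 1 by the serializer.
    stats = catalog.list_table_statistics_by_database_and_schema(
        account_name, database, schema,
        filter="name ne 'tmp'",   # placeholder OData filter expression
        top=10,
        orderby='name desc',
        count=True)
    return list(stats)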
+ :type table_type_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlTableType + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlTableType + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tabletypes/{tableTypeName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableTypeName': self._serialize.url("table_type_name", table_type_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlTableType', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_table_types( + self, account_name, database_name, schema_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of table types from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the table + types. + :type database_name: str + :param schema_name: The name of the schema containing the table types. + :type schema_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. 
+ Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlTableType + ` + :rtype: :class:`USqlTableTypePaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tabletypes' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTableTypePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlTableTypePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_package( + self, account_name, database_name, schema_name, package_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified package from the Data Lake Analytics catalog. 
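When raw=True is passed, the get_* operations above return msrest's ClientRawResponse, which carries both the deserialized model and the underlying HTTP response. A hedged sketch using get_table_type, defined just above:

def get_table_type_with_diagnostics(catalog, account_name, database, schema, type_name):
    raw = catalog.get_table_type(account_name, database, schema, type_name, raw=True)
    # ClientRawResponse wraps the deserialized USqlTableType in .output and the
    # HTTP response in .response.
    print(raw.response.status_code)
    print(raw.response.headers.get('x-ms-request-id'))
    return raw.output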
+ + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the package. + :type database_name: str + :param schema_name: The name of the schema containing the package. + :type schema_name: str + :param package_name: The name of the package. + :type package_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlPackage + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlPackage + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/packages/{packageName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'packageName': self._serialize.url("package_name", package_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlPackage', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_packages( + self, account_name, database_name, schema_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of packages from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + packages. + :type database_name: str + :param schema_name: The name of the schema containing the packages. + :type schema_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. 
Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlPackage + ` + :rtype: :class:`USqlPackagePaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/packages' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlPackagePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlPackagePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + 
return deserialized + + def get_view( + self, account_name, database_name, schema_name, view_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified view from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the view. + :type database_name: str + :param schema_name: The name of the schema containing the view. + :type schema_name: str + :param view_name: The name of the view. + :type view_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlView + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlView + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/views/{viewName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'viewName': self._serialize.url("view_name", view_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlView', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_views( + self, account_name, database_name, schema_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of views from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the views. + :type database_name: str + :param schema_name: The name of the schema containing the views. + :type schema_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. 
Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlView + ` + :rtype: :class:`USqlViewPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/views' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlViewPaged(internal_paging, self._deserialize.dependencies) + + if raw: + 
header_dict = {} + client_raw_response = models.USqlViewPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_table_statistic( + self, account_name, database_name, schema_name, table_name, statistics_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified table statistics from the Data Lake Analytics + catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + statistics. + :type database_name: str + :param schema_name: The name of the schema containing the statistics. + :type schema_name: str + :param table_name: The name of the table containing the statistics. + :type table_name: str + :param statistics_name: The name of the table statistics. + :type statistics_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlTableStatistics + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlTableStatistics + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tables/{tableName}/statistics/{statisticsName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableName': self._serialize.url("table_name", table_name, 'str'), + 'statisticsName': self._serialize.url("statistics_name", statistics_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlTableStatistics', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_table_statistics( + self, account_name, database_name, schema_name, table_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + 
"""Retrieves the list of table statistics from the Data Lake Analytics + catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + statistics. + :type database_name: str + :param schema_name: The name of the schema containing the statistics. + :type schema_name: str + :param table_name: The name of the table containing the statistics. + :type table_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlTableStatistics + ` + :rtype: :class:`USqlTableStatisticsPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tables/{tableName}/statistics' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableName': self._serialize.url("table_name", table_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + 
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTableStatisticsPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlTableStatisticsPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_table_partition( + self, account_name, database_name, schema_name, table_name, partition_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified table partition from the Data Lake Analytics + catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + partition. + :type database_name: str + :param schema_name: The name of the schema containing the partition. + :type schema_name: str + :param table_name: The name of the table containing the partition. + :type table_name: str + :param partition_name: The name of the table partition. + :type partition_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
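Per-request headers supplied through custom_headers are merged into the generated headers last (header_parameters.update(custom_headers) above), so a caller-supplied x-ms-client-request-id overrides the auto-generated one. A hedged sketch:

def get_partition_traced(catalog, account_name, database, schema, table, partition):
    # A fixed client request id makes the call easy to correlate in service logs.
    headers = {'x-ms-client-request-id': '00000000-0000-0000-0000-000000000000'}
    return catalog.get_table_partition(
        account_name, database, schema, table, partition,
        custom_headers=headers)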
+ :return: :class:`USqlTablePartition + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlTablePartition + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tables/{tableName}/partitions/{partitionName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableName': self._serialize.url("table_name", table_name, 'str'), + 'partitionName': self._serialize.url("partition_name", partition_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlTablePartition', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_table_partitions( + self, account_name, database_name, schema_name, table_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of table partitions from the Data Lake Analytics + catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + partitions. + :type database_name: str + :param schema_name: The name of the schema containing the partitions. + :type schema_name: str + :param table_name: The name of the table containing the partitions. + :type table_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. 
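Non-200 responses are raised as CloudError with the service's x-ms-request-id attached (see the status-code check above). A hedged handling sketch:

from msrestazure.azure_exceptions import CloudError

def try_get_partition(catalog, account_name, database, schema, table, partition):
    try:
        return catalog.get_table_partition(
            account_name, database, schema, table, partition)
    except CloudError as err:
        # err.request_id is populated from the x-ms-request-id response header.
        print('request failed, request id:', err.request_id)
        return None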
+ :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlTablePartition + ` + :rtype: :class:`USqlTablePartitionPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tables/{tableName}/partitions' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableName': self._serialize.url("table_name", table_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTablePartitionPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlTablePartitionPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def list_types( + self, account_name, database_name, schema_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of types within the 
specified database and schema + from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the types. + :type database_name: str + :param schema_name: The name of the schema containing the types. + :type schema_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlType + ` + :rtype: :class:`USqlTypePaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/types' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = 
self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTypePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlTypePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_table_valued_function( + self, account_name, database_name, schema_name, table_valued_function_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified table valued function from the Data Lake + Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the table + valued function. + :type database_name: str + :param schema_name: The name of the schema containing the table valued + function. + :type schema_name: str + :param table_valued_function_name: The name of the + tableValuedFunction. + :type table_valued_function_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlTableValuedFunction + ` + or :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlTableValuedFunction + ` + or :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tablevaluedfunctions/{tableValuedFunctionName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str'), + 'tableValuedFunctionName': self._serialize.url("table_valued_function_name", table_valued_function_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise 
exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlTableValuedFunction', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_table_valued_functions( + self, account_name, database_name, schema_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of table valued functions from the Data Lake + Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the table + valued functions. + :type database_name: str + :param schema_name: The name of the schema containing the table valued + functions. + :type schema_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of :class:`USqlTableValuedFunction + ` + :rtype: :class:`USqlTableValuedFunctionPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}/tablevaluedfunctions' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTableValuedFunctionPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlTableValuedFunctionPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_assembly( + self, account_name, database_name, assembly_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified assembly from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + assembly. + :type database_name: str + :param assembly_name: The name of the assembly. + :type assembly_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
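Each list_* method above builds a local internal_paging(next_link, raw) closure and hands it to the matching *Paged model, which calls it repeatedly with the next_link of the previous page. The following much-simplified sketch illustrates only that contract; it is not the msrest implementation, and extract_items / extract_next_link stand in for the deserialization the Paged models perform:

def iterate_pages(internal_paging, extract_items, extract_next_link):
    # Fetch a page, yield its items, then follow next_link until exhausted.
    next_link = None
    while True:
        response = internal_paging(next_link)    # raw HTTP response for one page
        for item in extract_items(response):
            yield item
        next_link = extract_next_link(response)
        if not next_link:
            break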
+ :return: :class:`USqlAssembly + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlAssembly + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/assemblies/{assemblyName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'assemblyName': self._serialize.url("assembly_name", assembly_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlAssembly', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_assemblies( + self, account_name, database_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of assemblies from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the + assembly. + :type database_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. 
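list_assemblies pages through USqlAssemblyClr summaries while get_assembly returns the fuller USqlAssembly for a single name, as the return annotations above and below indicate. A hedged sketch combining the two (the summary attribute name is assumed, hence the getattr):

def dump_assemblies(catalog, account_name, database):
    for summary in catalog.list_assemblies(account_name, database, top=50):
        name = getattr(summary, 'name', None)   # attribute name assumed
        if name:
            print(catalog.get_assembly(account_name, database, name))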
+ :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlAssemblyClr + ` + :rtype: :class:`USqlAssemblyClrPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/assemblies' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlAssemblyClrPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlAssemblyClrPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_schema( + self, account_name, database_name, schema_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified schema from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the schema. + :type database_name: str + :param schema_name: The name of the schema. 
+ :type schema_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlSchema + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlSchema + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas/{schemaName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str'), + 'schemaName': self._serialize.url("schema_name", schema_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlSchema', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_schemas( + self, account_name, database_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of schemas from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the schema. + :type database_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. 
+ :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlSchema + ` + :rtype: :class:`USqlSchemaPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/schemas' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlSchemaPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlSchemaPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def list_table_statistics_by_database( + self, account_name, database_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of all statistics in a database from the Data Lake + Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the table + statistics. + :type database_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. 
+ :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlTableStatistics + ` + :rtype: :class:`USqlTableStatisticsPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/statistics' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTableStatisticsPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = 
models.USqlTableStatisticsPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def list_tables_by_database( + self, account_name, database_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, basic=False, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of all tables in a database from the Data Lake + Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the tables. + :type database_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param basic: The basic switch indicates what level of information to + return when listing tables. When basic is true, only database_name, + schema_name, table_name and version are returned for each table, + otherwise all table metadata is returned. By default, it is false + :type basic: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of :class:`USqlTable + ` + :rtype: :class:`USqlTablePaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/tables' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + if basic is not None: + query_parameters['basic'] = self._serialize.query("basic", basic, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTablePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlTablePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def list_table_valued_functions_by_database( + self, account_name, database_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of all table valued functions in a database from the + Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the table + valued functions. + :type database_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. 
+ :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlTableValuedFunction + ` + :rtype: :class:`USqlTableValuedFunctionPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/tablevaluedfunctions' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlTableValuedFunctionPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlTableValuedFunctionPaged(internal_paging, self._deserialize.dependencies, header_dict) + return 
client_raw_response + + return deserialized + + def list_views_by_database( + self, account_name, database_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of all views in a database from the Data Lake + Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database containing the views. + :type database_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlView + ` + :rtype: :class:`USqlViewPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases/{databaseName}/views' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + 
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.USqlViewPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlViewPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get_database( + self, account_name, database_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified database from the Data Lake Analytics catalog. + + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param database_name: The name of the database. + :type database_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`USqlDatabase + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlDatabase + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/catalog/usql/databases/{databaseName}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True), + 'databaseName': self._serialize.url("database_name", database_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlDatabase', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list_databases( + self, account_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Retrieves the list of databases from the Data Lake Analytics catalog. 
+ + :param account_name: The Azure Data Lake Analytics account upon which + to execute catalog operations. + :type account_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`USqlDatabase + ` + :rtype: :class:`USqlDatabasePaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/catalog/usql/databases' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaCatalogDnsSuffix': self._serialize.url("self.config.adla_catalog_dns_suffix", self.config.adla_catalog_dns_suffix, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = 
models.USqlDatabasePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.USqlDatabasePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/version.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/version.py new file mode 100644 index 00000000000..9a6b4374370 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/catalog/version.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +VERSION = "0.1.6" + diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/__init__.py new file mode 100644 index 00000000000..8867563082d --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/__init__.py @@ -0,0 +1,18 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .data_lake_analytics_job_management_client import DataLakeAnalyticsJobManagementClient +from .version import VERSION + +__all__ = ['DataLakeAnalyticsJobManagementClient'] + +__version__ = VERSION + diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/data_lake_analytics_job_management_client.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/data_lake_analytics_job_management_client.py new file mode 100644 index 00000000000..d68073d125f --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/data_lake_analytics_job_management_client.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
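For review context, here is a short usage sketch of the vendored catalog operations above. It is illustrative only and not part of the vendored files: the DataLakeAnalyticsCatalogManagementClient import path, the catalog operations attribute, the DNS suffix value, and the model attribute names are assumptions carried over from the upstream SDK layout; only the operation names and their OData options (filter, top, skip, select, orderby, count, basic) come from the generated code shown in this diff.

# Illustrative sketch, not part of the vendored sources. Client wiring and
# model attribute names (e.g. .name) are assumptions from the upstream SDK;
# the operations called here and their OData options are taken from this diff.
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.catalog import (
    DataLakeAnalyticsCatalogManagementClient)


def dump_catalog(credentials, account_name):
    client = DataLakeAnalyticsCatalogManagementClient(
        credentials, adla_catalog_dns_suffix='azuredatalakeanalytics.net')

    # list_databases returns a paged iterator (USqlDatabasePaged); the keyword
    # arguments map directly onto the $top/$orderby query parameters built above.
    for db in client.catalog.list_databases(account_name, top=10, orderby='DatabaseName asc'):
        print(db.name)

        # Per-database listings follow the same internal_paging pattern and
        # raise msrestazure CloudError on any non-200 response.
        for table in client.catalog.list_tables_by_database(account_name, db.name, basic=True):
            print('  table:', table.name)

    # Point reads such as get_database deserialize a single model instance.
    return client.catalog.get_database(account_name, 'master')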
+# --------------------------------------------------------------------------
+
+from msrest.service_client import ServiceClient
+from msrest import Serializer, Deserializer
+from msrestazure import AzureConfiguration
+from .version import VERSION
+from .operations.pipeline_operations import PipelineOperations
+from .operations.recurrence_operations import RecurrenceOperations
+from .operations.job_operations import JobOperations
+from . import models
+
+
+class DataLakeAnalyticsJobManagementClientConfiguration(AzureConfiguration):
+    """Configuration for DataLakeAnalyticsJobManagementClient
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credentials: Credentials needed for the client to connect to Azure.
+    :type credentials: :mod:`A msrestazure Credentials object`
+    :param adla_job_dns_suffix: Gets the DNS suffix used as the base for all
+     Azure Data Lake Analytics Job service requests.
+    :type adla_job_dns_suffix: str
+    """
+
+    def __init__(
+            self, credentials, adla_job_dns_suffix):
+
+        if credentials is None:
+            raise ValueError("Parameter 'credentials' must not be None.")
+        if adla_job_dns_suffix is None:
+            raise ValueError("Parameter 'adla_job_dns_suffix' must not be None.")
+        if not isinstance(adla_job_dns_suffix, str):
+            raise TypeError("Parameter 'adla_job_dns_suffix' must be str.")
+        base_url = 'https://{accountName}.{adlaJobDnsSuffix}'
+
+        super(DataLakeAnalyticsJobManagementClientConfiguration, self).__init__(base_url)
+
+        self.add_user_agent('datalakeanalyticsjobmanagementclient/{}'.format(VERSION))
+        self.add_user_agent('Azure-SDK-For-Python')
+
+        self.credentials = credentials
+        self.adla_job_dns_suffix = adla_job_dns_suffix
+
+
+class DataLakeAnalyticsJobManagementClient(object):
+    """Creates an Azure Data Lake Analytics job client.
+
+    :ivar config: Configuration for client.
+    :vartype config: DataLakeAnalyticsJobManagementClientConfiguration
+
+    :ivar pipeline: Pipeline operations
+    :vartype pipeline: azure.mgmt.datalake.analytics.job.operations.PipelineOperations
+    :ivar recurrence: Recurrence operations
+    :vartype recurrence: azure.mgmt.datalake.analytics.job.operations.RecurrenceOperations
+    :ivar job: Job operations
+    :vartype job: azure.mgmt.datalake.analytics.job.operations.JobOperations
+
+    :param credentials: Credentials needed for the client to connect to Azure.
+    :type credentials: :mod:`A msrestazure Credentials object`
+    :param adla_job_dns_suffix: Gets the DNS suffix used as the base for all
+     Azure Data Lake Analytics Job service requests.
+ :type adla_job_dns_suffix: str + """ + + def __init__( + self, credentials, adla_job_dns_suffix): + + self.config = DataLakeAnalyticsJobManagementClientConfiguration(credentials, adla_job_dns_suffix) + self._client = ServiceClient(self.config.credentials, self.config) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self.api_version = '2016-11-01' + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + + self.pipeline = PipelineOperations( + self._client, self.config, self._serialize, self._deserialize) + self.recurrence = RecurrenceOperations( + self._client, self.config, self._serialize, self._deserialize) + self.job = JobOperations( + self._client, self.config, self._serialize, self._deserialize) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/__init__.py new file mode 100644 index 00000000000..d28921edb70 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/__init__.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .job_statistics_vertex_stage import JobStatisticsVertexStage +from .job_statistics import JobStatistics +from .job_data_path import JobDataPath +from .job_state_audit_record import JobStateAuditRecord +from .job_resource import JobResource +from .diagnostics import Diagnostics +from .usql_job_properties import USqlJobProperties +from .hive_job_properties import HiveJobProperties +from .job_properties import JobProperties +from .create_usql_job_properties import CreateUSqlJobProperties +from .create_job_properties import CreateJobProperties +from .job_inner_error import JobInnerError +from .job_error_details import JobErrorDetails +from .job_relationship_properties import JobRelationshipProperties +from .job_pipeline_run_information import JobPipelineRunInformation +from .job_pipeline_information import JobPipelineInformation +from .job_recurrence_information import JobRecurrenceInformation +from .create_job_parameters import CreateJobParameters +from .build_job_parameters import BuildJobParameters +from .base_job_parameters import BaseJobParameters +from .job_information_basic import JobInformationBasic +from .job_information import JobInformation +from .job_pipeline_information_paged import JobPipelineInformationPaged +from .job_recurrence_information_paged import JobRecurrenceInformationPaged +from .job_information_basic_paged import JobInformationBasicPaged +from .data_lake_analytics_job_management_client_enums import ( + JobResourceType, + SeverityTypes, + CompileMode, + JobType, + JobState, + JobResult, +) + +__all__ = [ + 'JobStatisticsVertexStage', + 'JobStatistics', + 'JobDataPath', + 'JobStateAuditRecord', + 'JobResource', + 'Diagnostics', + 'USqlJobProperties', + 'HiveJobProperties', + 'JobProperties', + 'CreateUSqlJobProperties', + 'CreateJobProperties', + 'JobInnerError', + 
'JobErrorDetails', + 'JobRelationshipProperties', + 'JobPipelineRunInformation', + 'JobPipelineInformation', + 'JobRecurrenceInformation', + 'CreateJobParameters', + 'BuildJobParameters', + 'BaseJobParameters', + 'JobInformationBasic', + 'JobInformation', + 'JobPipelineInformationPaged', + 'JobRecurrenceInformationPaged', + 'JobInformationBasicPaged', + 'JobResourceType', + 'SeverityTypes', + 'CompileMode', + 'JobType', + 'JobState', + 'JobResult', +] diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/base_job_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/base_job_parameters.py new file mode 100644 index 00000000000..8ff42cdafe6 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/base_job_parameters.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class BaseJobParameters(Model): + """Data Lake Analytics Job Parameters base class for build and submit. + + :param type: the job type of the current job (Hive or USql). Possible + values include: 'USql', 'Hive' + :type type: str or :class:`JobType + ` + :param properties: the job specific properties. + :type properties: :class:`CreateJobProperties + ` + """ + + _validation = { + 'type': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'JobType'}, + 'properties': {'key': 'properties', 'type': 'CreateJobProperties'}, + } + + def __init__(self, type, properties): + self.type = type + self.properties = properties diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/build_job_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/build_job_parameters.py new file mode 100644 index 00000000000..3bc9fe52b14 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/build_job_parameters.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .base_job_parameters import BaseJobParameters + + +class BuildJobParameters(BaseJobParameters): + """The parameters used to build a new Data Lake Analytics job. + + :param type: the job type of the current job (Hive or USql). Possible + values include: 'USql', 'Hive' + :type type: str or :class:`JobType + ` + :param properties: the job specific properties. 
+ :type properties: :class:`CreateJobProperties + ` + :param name: the friendly name of the job to build. + :type name: str + """ + + _validation = { + 'type': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'JobType'}, + 'properties': {'key': 'properties', 'type': 'CreateJobProperties'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, type, properties, name=None): + super(BuildJobParameters, self).__init__(type=type, properties=properties) + self.name = name diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/create_job_parameters.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/create_job_parameters.py new file mode 100644 index 00000000000..d8fb4e346f1 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/create_job_parameters.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .base_job_parameters import BaseJobParameters + + +class CreateJobParameters(BaseJobParameters): + """The parameters used to submit a new Data Lake Analytics job. + + :param type: the job type of the current job (Hive or USql). Possible + values include: 'USql', 'Hive' + :type type: str or :class:`JobType + ` + :param properties: the job specific properties. + :type properties: :class:`CreateJobProperties + ` + :param name: the friendly name of the job to submit. + :type name: str + :param degree_of_parallelism: the degree of parallelism to use for this + job. This must be greater than 0, if set to less than 0 it will default to + 1. Default value: 1 . + :type degree_of_parallelism: int + :param priority: the priority value to use for the current job. Lower + numbers have a higher priority. By default, a job has a priority of 1000. + This must be greater than 0. + :type priority: int + :param log_file_patterns: the list of log file name patterns to find in + the logFolder. '*' is the only matching character allowed. Example format: + jobExecution*.log or *mylog*.txt + :type log_file_patterns: list of str + :param related: the recurring job relationship information properties. 
+ :type related: :class:`JobRelationshipProperties + ` + """ + + _validation = { + 'type': {'required': True}, + 'properties': {'required': True}, + 'name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'JobType'}, + 'properties': {'key': 'properties', 'type': 'CreateJobProperties'}, + 'name': {'key': 'name', 'type': 'str'}, + 'degree_of_parallelism': {'key': 'degreeOfParallelism', 'type': 'int'}, + 'priority': {'key': 'priority', 'type': 'int'}, + 'log_file_patterns': {'key': 'logFilePatterns', 'type': '[str]'}, + 'related': {'key': 'related', 'type': 'JobRelationshipProperties'}, + } + + def __init__(self, type, properties, name, degree_of_parallelism=1, priority=None, log_file_patterns=None, related=None): + super(CreateJobParameters, self).__init__(type=type, properties=properties) + self.name = name + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.log_file_patterns = log_file_patterns + self.related = related diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/create_job_properties.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/create_job_properties.py new file mode 100644 index 00000000000..c4599162449 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/create_job_properties.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateJobProperties(Model): + """The common Data Lake Analytics job properties for job submission. + + :param runtime_version: the runtime version of the Data Lake Analytics + engine to use for the specific type of job being run. + :type runtime_version: str + :param script: the script to run + :type script: str + :param type: Polymorphic Discriminator + :type type: str + """ + + _validation = { + 'script': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + 'script': {'key': 'script', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'USql': 'CreateUSqlJobProperties'} + } + + def __init__(self, script, runtime_version=None): + self.runtime_version = runtime_version + self.script = script + self.type = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/create_usql_job_properties.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/create_usql_job_properties.py new file mode 100644 index 00000000000..e85a3bcfa20 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/create_usql_job_properties.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .create_job_properties import CreateJobProperties + + +class CreateUSqlJobProperties(CreateJobProperties): + """U-SQL job properties used when submitting U-SQL jobs. + + :param runtime_version: the runtime version of the Data Lake Analytics + engine to use for the specific type of job being run. + :type runtime_version: str + :param script: the script to run + :type script: str + :param type: Polymorphic Discriminator + :type type: str + :param compile_mode: Optionally enforces a specific compilation mode for + the job during execution. If this is not specified during submission, the + server will determine the optimal compilation mode. Possible values + include: 'Semantic', 'Full', 'SingleBox' + :type compile_mode: str or :class:`CompileMode + ` + """ + + _validation = { + 'script': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + 'script': {'key': 'script', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compile_mode': {'key': 'compileMode', 'type': 'CompileMode'}, + } + + def __init__(self, script, runtime_version=None, compile_mode=None): + super(CreateUSqlJobProperties, self).__init__(runtime_version=runtime_version, script=script) + self.compile_mode = compile_mode + self.type = 'USql' diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/data_lake_analytics_job_management_client_enums.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/data_lake_analytics_job_management_client_enums.py new file mode 100644 index 00000000000..fdec7b65d26 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/data_lake_analytics_job_management_client_enums.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
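For review context, the job client and the parameter models introduced above are enough to sketch a submission call. The sketch below is illustrative only: the client constructor and the CreateJobParameters / CreateUSqlJobProperties signatures come from the code in this diff, while the client.job.create(...) call shape and the DNS suffix value are assumptions, since JobOperations itself is not included in this excerpt.

# Illustrative sketch, not part of the vendored sources. The create() call
# shape is assumed from the upstream SDK; everything else mirrors the
# constructors defined in this diff.
import uuid

from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.job import (
    DataLakeAnalyticsJobManagementClient)
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.job.models import (
    CreateJobParameters, CreateUSqlJobProperties, JobType)


def submit_usql(credentials, account_name, script):
    # Constructor arguments match DataLakeAnalyticsJobManagementClientConfiguration.
    client = DataLakeAnalyticsJobManagementClient(
        credentials, adla_job_dns_suffix='azuredatalakeanalytics.net')

    parameters = CreateJobParameters(
        type=JobType.usql,
        properties=CreateUSqlJobProperties(script=script),
        name='sample job',
        degree_of_parallelism=1,   # must be greater than 0; defaults to 1
        priority=1000)             # lower numbers have higher priority

    # Assumed call: JobOperations is generated alongside the models above.
    return client.job.create(account_name, str(uuid.uuid4()), parameters)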
+# -------------------------------------------------------------------------- + +from enum import Enum + + +class JobResourceType(Enum): + + vertex_resource = "VertexResource" + job_manager_resource = "JobManagerResource" + statistics_resource = "StatisticsResource" + vertex_resource_in_user_folder = "VertexResourceInUserFolder" + job_manager_resource_in_user_folder = "JobManagerResourceInUserFolder" + statistics_resource_in_user_folder = "StatisticsResourceInUserFolder" + + +class SeverityTypes(Enum): + + warning = "Warning" + error = "Error" + info = "Info" + severe_warning = "SevereWarning" + deprecated = "Deprecated" + user_warning = "UserWarning" + + +class CompileMode(Enum): + + semantic = "Semantic" + full = "Full" + single_box = "SingleBox" + + +class JobType(Enum): + + usql = "USql" + hive = "Hive" + + +class JobState(Enum): + + accepted = "Accepted" + compiling = "Compiling" + ended = "Ended" + new = "New" + queued = "Queued" + running = "Running" + scheduling = "Scheduling" + starting = "Starting" + paused = "Paused" + waiting_for_capacity = "WaitingForCapacity" + + +class JobResult(Enum): + + none = "None" + succeeded = "Succeeded" + cancelled = "Cancelled" + failed = "Failed" diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/diagnostics.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/diagnostics.py new file mode 100644 index 00000000000..7def54e08cd --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/diagnostics.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Diagnostics(Model): + """Error diagnostic information for failed jobs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar column_number: the column where the error occured. + :vartype column_number: int + :ivar end: the ending index of the error. + :vartype end: int + :ivar line_number: the line number the error occured on. + :vartype line_number: int + :ivar message: the error message. + :vartype message: str + :ivar severity: the severity of the error. Possible values include: + 'Warning', 'Error', 'Info', 'SevereWarning', 'Deprecated', 'UserWarning' + :vartype severity: str or :class:`SeverityTypes + ` + :ivar start: the starting index of the error. 
+ :vartype start: int + """ + + _validation = { + 'column_number': {'readonly': True}, + 'end': {'readonly': True}, + 'line_number': {'readonly': True}, + 'message': {'readonly': True}, + 'severity': {'readonly': True}, + 'start': {'readonly': True}, + } + + _attribute_map = { + 'column_number': {'key': 'columnNumber', 'type': 'int'}, + 'end': {'key': 'end', 'type': 'int'}, + 'line_number': {'key': 'lineNumber', 'type': 'int'}, + 'message': {'key': 'message', 'type': 'str'}, + 'severity': {'key': 'severity', 'type': 'SeverityTypes'}, + 'start': {'key': 'start', 'type': 'int'}, + } + + def __init__(self): + self.column_number = None + self.end = None + self.line_number = None + self.message = None + self.severity = None + self.start = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/hive_job_properties.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/hive_job_properties.py new file mode 100644 index 00000000000..74df4f70dd0 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/hive_job_properties.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .job_properties import JobProperties + + +class HiveJobProperties(JobProperties): + """Hive job properties used when retrieving Hive jobs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param runtime_version: the runtime version of the Data Lake Analytics + engine to use for the specific type of job being run. 
+ :type runtime_version: str + :param script: the script to run + :type script: str + :param type: Polymorphic Discriminator + :type type: str + :ivar logs_location: the Hive logs location + :vartype logs_location: str + :ivar output_location: the location of Hive job output files (both + execution output and results) + :vartype output_location: str + :ivar statement_count: the number of statements that will be run based on + the script + :vartype statement_count: int + :ivar executed_statement_count: the number of statements that have been + run based on the script + :vartype executed_statement_count: int + """ + + _validation = { + 'script': {'required': True}, + 'type': {'required': True}, + 'logs_location': {'readonly': True}, + 'output_location': {'readonly': True}, + 'statement_count': {'readonly': True}, + 'executed_statement_count': {'readonly': True}, + } + + _attribute_map = { + 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + 'script': {'key': 'script', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'logs_location': {'key': 'logsLocation', 'type': 'str'}, + 'output_location': {'key': 'outputLocation', 'type': 'str'}, + 'statement_count': {'key': 'statementCount', 'type': 'int'}, + 'executed_statement_count': {'key': 'executedStatementCount', 'type': 'int'}, + } + + def __init__(self, script, runtime_version=None): + super(HiveJobProperties, self).__init__(runtime_version=runtime_version, script=script) + self.logs_location = None + self.output_location = None + self.statement_count = None + self.executed_statement_count = None + self.type = 'Hive' diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_data_path.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_data_path.py new file mode 100644 index 00000000000..394b125f35b --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_data_path.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobDataPath(Model): + """A Data Lake Analytics job data path item. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar job_id: the id of the job this data is for. + :vartype job_id: str + :ivar command: the command that this job data relates to. + :vartype command: str + :ivar paths: the list of paths to all of the job data. 
+ :vartype paths: list of str + """ + + _validation = { + 'job_id': {'readonly': True}, + 'command': {'readonly': True}, + 'paths': {'readonly': True}, + } + + _attribute_map = { + 'job_id': {'key': 'jobId', 'type': 'str'}, + 'command': {'key': 'command', 'type': 'str'}, + 'paths': {'key': 'paths', 'type': '[str]'}, + } + + def __init__(self): + self.job_id = None + self.command = None + self.paths = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_error_details.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_error_details.py new file mode 100644 index 00000000000..9b160bd9753 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_error_details.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobErrorDetails(Model): + """The Data Lake Analytics job error details. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar description: the error message description + :vartype description: str + :ivar details: the details of the error message. + :vartype details: str + :ivar end_offset: the end offset in the job where the error was found. + :vartype end_offset: int + :ivar error_id: the specific identifier for the type of error encountered + in the job. + :vartype error_id: str + :ivar file_path: the path to any supplemental error files, if any. + :vartype file_path: str + :ivar help_link: the link to MSDN or Azure help for this type of error, if + any. + :vartype help_link: str + :ivar internal_diagnostics: the internal diagnostic stack trace if the + user requesting the job error details has sufficient permissions it will + be retrieved, otherwise it will be empty. + :vartype internal_diagnostics: str + :ivar line_number: the specific line number in the job where the error + occured. + :vartype line_number: int + :ivar message: the user friendly error message for the failure. + :vartype message: str + :ivar resolution: the recommended resolution for the failure, if any. + :vartype resolution: str + :ivar inner_error: the inner error of this specific job error message, if + any. + :vartype inner_error: :class:`JobInnerError + ` + :ivar severity: the severity level of the failure. Possible values + include: 'Warning', 'Error', 'Info', 'SevereWarning', 'Deprecated', + 'UserWarning' + :vartype severity: str or :class:`SeverityTypes + ` + :ivar source: the ultimate source of the failure (usually either SYSTEM or + USER). 
+ :vartype source: str + :ivar start_offset: the start offset in the job where the error was found + :vartype start_offset: int + """ + + _validation = { + 'description': {'readonly': True}, + 'details': {'readonly': True}, + 'end_offset': {'readonly': True}, + 'error_id': {'readonly': True}, + 'file_path': {'readonly': True}, + 'help_link': {'readonly': True}, + 'internal_diagnostics': {'readonly': True}, + 'line_number': {'readonly': True}, + 'message': {'readonly': True}, + 'resolution': {'readonly': True}, + 'inner_error': {'readonly': True}, + 'severity': {'readonly': True}, + 'source': {'readonly': True}, + 'start_offset': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'details': {'key': 'details', 'type': 'str'}, + 'end_offset': {'key': 'endOffset', 'type': 'int'}, + 'error_id': {'key': 'errorId', 'type': 'str'}, + 'file_path': {'key': 'filePath', 'type': 'str'}, + 'help_link': {'key': 'helpLink', 'type': 'str'}, + 'internal_diagnostics': {'key': 'internalDiagnostics', 'type': 'str'}, + 'line_number': {'key': 'lineNumber', 'type': 'int'}, + 'message': {'key': 'message', 'type': 'str'}, + 'resolution': {'key': 'resolution', 'type': 'str'}, + 'inner_error': {'key': 'innerError', 'type': 'JobInnerError'}, + 'severity': {'key': 'severity', 'type': 'SeverityTypes'}, + 'source': {'key': 'source', 'type': 'str'}, + 'start_offset': {'key': 'startOffset', 'type': 'int'}, + } + + def __init__(self): + self.description = None + self.details = None + self.end_offset = None + self.error_id = None + self.file_path = None + self.help_link = None + self.internal_diagnostics = None + self.line_number = None + self.message = None + self.resolution = None + self.inner_error = None + self.severity = None + self.source = None + self.start_offset = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_information.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_information.py new file mode 100644 index 00000000000..b4c54c483e3 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_information.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .job_information_basic import JobInformationBasic + + +class JobInformation(JobInformationBasic): + """The extended Data Lake Analytics job information properties returned when + retrieving a specific job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar job_id: the job's unique identifier (a GUID). + :vartype job_id: str + :param name: the friendly name of the job. + :type name: str + :param type: the job type of the current job (Hive or USql). Possible + values include: 'USql', 'Hive' + :type type: str or :class:`JobType + ` + :ivar submitter: the user or account that submitted the job. + :vartype submitter: str + :param degree_of_parallelism: the degree of parallelism used for this job. 
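# Illustrative usage sketch (not part of the generated sources): one way a caller
# might flatten the read-only JobErrorDetails fields above into a log line. All
# attribute names come straight from the model; nothing here is required by the SDK.
def format_job_error(error):
    """Render a JobErrorDetails instance as a single human-readable string."""
    parts = ['[{0}] {1}'.format(error.severity, error.message)]
    if error.line_number is not None:
        parts.append('line {0}'.format(error.line_number))
    if error.resolution:
        parts.append('resolution: {0}'.format(error.resolution))
    # inner_error is a nested JobInnerError carrying its own message, if any.
    if error.inner_error is not None and error.inner_error.message:
        parts.append('inner: {0}'.format(error.inner_error.message))
    return '; '.join(parts)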
+ This must be greater than 0, if set to less than 0 it will default to 1. + Default value: 1 . + :type degree_of_parallelism: int + :param priority: the priority value for the current job. Lower numbers + have a higher priority. By default, a job has a priority of 1000. This + must be greater than 0. + :type priority: int + :ivar submit_time: the time the job was submitted to the service. + :vartype submit_time: datetime + :ivar start_time: the start time of the job. + :vartype start_time: datetime + :ivar end_time: the completion time of the job. + :vartype end_time: datetime + :ivar state: the job state. When the job is in the Ended state, refer to + Result and ErrorMessage for details. Possible values include: 'Accepted', + 'Compiling', 'Ended', 'New', 'Queued', 'Running', 'Scheduling', + 'Starting', 'Paused', 'WaitingForCapacity' + :vartype state: str or :class:`JobState + ` + :ivar result: the result of job execution or the current result of the + running job. Possible values include: 'None', 'Succeeded', 'Cancelled', + 'Failed' + :vartype result: str or :class:`JobResult + ` + :ivar log_folder: the log folder path to use in the following format: + adl://.azuredatalakestore.net/system/jobservice/jobs/Usql/2016/03/13/17/18/5fe51957-93bc-4de0-8ddc-c5a4753b068b/logs/. + :vartype log_folder: str + :param log_file_patterns: the list of log file name patterns to find in + the logFolder. '*' is the only matching character allowed. Example format: + jobExecution*.log or *mylog*.txt + :type log_file_patterns: list of str + :param related: the recurring job relationship information properties. + :type related: :class:`JobRelationshipProperties + ` + :ivar error_message: the error message details for the job, if the job + failed. + :vartype error_message: list of :class:`JobErrorDetails + ` + :ivar state_audit_records: the job state audit records, indicating when + various operations have been performed on this job. + :vartype state_audit_records: list of :class:`JobStateAuditRecord + ` + :param properties: the job specific properties. 
+ :type properties: :class:`JobProperties + ` + """ + + _validation = { + 'job_id': {'readonly': True}, + 'name': {'required': True}, + 'type': {'required': True}, + 'submitter': {'readonly': True}, + 'submit_time': {'readonly': True}, + 'start_time': {'readonly': True}, + 'end_time': {'readonly': True}, + 'state': {'readonly': True}, + 'result': {'readonly': True}, + 'log_folder': {'readonly': True}, + 'error_message': {'readonly': True}, + 'state_audit_records': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'job_id': {'key': 'jobId', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'JobType'}, + 'submitter': {'key': 'submitter', 'type': 'str'}, + 'degree_of_parallelism': {'key': 'degreeOfParallelism', 'type': 'int'}, + 'priority': {'key': 'priority', 'type': 'int'}, + 'submit_time': {'key': 'submitTime', 'type': 'iso-8601'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'state': {'key': 'state', 'type': 'JobState'}, + 'result': {'key': 'result', 'type': 'JobResult'}, + 'log_folder': {'key': 'logFolder', 'type': 'str'}, + 'log_file_patterns': {'key': 'logFilePatterns', 'type': '[str]'}, + 'related': {'key': 'related', 'type': 'JobRelationshipProperties'}, + 'error_message': {'key': 'errorMessage', 'type': '[JobErrorDetails]'}, + 'state_audit_records': {'key': 'stateAuditRecords', 'type': '[JobStateAuditRecord]'}, + 'properties': {'key': 'properties', 'type': 'JobProperties'}, + } + + def __init__(self, name, type, properties, degree_of_parallelism=1, priority=None, log_file_patterns=None, related=None): + super(JobInformation, self).__init__(name=name, type=type, degree_of_parallelism=degree_of_parallelism, priority=priority, log_file_patterns=log_file_patterns, related=related) + self.error_message = None + self.state_audit_records = None + self.properties = properties diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_information_basic.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_information_basic.py new file mode 100644 index 00000000000..b20b9b65323 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_information_basic.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobInformationBasic(Model): + """The common Data Lake Analytics job information properties. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar job_id: the job's unique identifier (a GUID). + :vartype job_id: str + :param name: the friendly name of the job. + :type name: str + :param type: the job type of the current job (Hive or USql). Possible + values include: 'USql', 'Hive' + :type type: str or :class:`JobType + ` + :ivar submitter: the user or account that submitted the job. 
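# Illustrative usage sketch (not part of the generated sources): reducing a
# retrieved JobInformation to a small status dict. Only attributes defined on the
# model above are used; how the object was fetched is outside this excerpt.
def summarize_job(job):
    """Build a compact summary from a JobInformation returned by the service."""
    summary = {
        'jobId': job.job_id,
        'name': job.name,
        'state': str(job.state),
        'result': str(job.result),
        'degreeOfParallelism': job.degree_of_parallelism,
        'priority': job.priority,
    }
    # error_message is a list of JobErrorDetails and is only populated for failed jobs.
    if job.error_message:
        summary['errors'] = [e.message for e in job.error_message]
    return summary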
+ :vartype submitter: str + :param degree_of_parallelism: the degree of parallelism used for this job. + This must be greater than 0, if set to less than 0 it will default to 1. + Default value: 1 . + :type degree_of_parallelism: int + :param priority: the priority value for the current job. Lower numbers + have a higher priority. By default, a job has a priority of 1000. This + must be greater than 0. + :type priority: int + :ivar submit_time: the time the job was submitted to the service. + :vartype submit_time: datetime + :ivar start_time: the start time of the job. + :vartype start_time: datetime + :ivar end_time: the completion time of the job. + :vartype end_time: datetime + :ivar state: the job state. When the job is in the Ended state, refer to + Result and ErrorMessage for details. Possible values include: 'Accepted', + 'Compiling', 'Ended', 'New', 'Queued', 'Running', 'Scheduling', + 'Starting', 'Paused', 'WaitingForCapacity' + :vartype state: str or :class:`JobState + ` + :ivar result: the result of job execution or the current result of the + running job. Possible values include: 'None', 'Succeeded', 'Cancelled', + 'Failed' + :vartype result: str or :class:`JobResult + ` + :ivar log_folder: the log folder path to use in the following format: + adl://.azuredatalakestore.net/system/jobservice/jobs/Usql/2016/03/13/17/18/5fe51957-93bc-4de0-8ddc-c5a4753b068b/logs/. + :vartype log_folder: str + :param log_file_patterns: the list of log file name patterns to find in + the logFolder. '*' is the only matching character allowed. Example format: + jobExecution*.log or *mylog*.txt + :type log_file_patterns: list of str + :param related: the recurring job relationship information properties. + :type related: :class:`JobRelationshipProperties + ` + """ + + _validation = { + 'job_id': {'readonly': True}, + 'name': {'required': True}, + 'type': {'required': True}, + 'submitter': {'readonly': True}, + 'submit_time': {'readonly': True}, + 'start_time': {'readonly': True}, + 'end_time': {'readonly': True}, + 'state': {'readonly': True}, + 'result': {'readonly': True}, + 'log_folder': {'readonly': True}, + } + + _attribute_map = { + 'job_id': {'key': 'jobId', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'JobType'}, + 'submitter': {'key': 'submitter', 'type': 'str'}, + 'degree_of_parallelism': {'key': 'degreeOfParallelism', 'type': 'int'}, + 'priority': {'key': 'priority', 'type': 'int'}, + 'submit_time': {'key': 'submitTime', 'type': 'iso-8601'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'state': {'key': 'state', 'type': 'JobState'}, + 'result': {'key': 'result', 'type': 'JobResult'}, + 'log_folder': {'key': 'logFolder', 'type': 'str'}, + 'log_file_patterns': {'key': 'logFilePatterns', 'type': '[str]'}, + 'related': {'key': 'related', 'type': 'JobRelationshipProperties'}, + } + + def __init__(self, name, type, degree_of_parallelism=1, priority=None, log_file_patterns=None, related=None): + self.job_id = None + self.name = name + self.type = type + self.submitter = None + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.submit_time = None + self.start_time = None + self.end_time = None + self.state = None + self.result = None + self.log_folder = None + self.log_file_patterns = log_file_patterns + self.related = related diff --git 
a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_information_basic_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_information_basic_paged.py new file mode 100644 index 00000000000..85a36a8c551 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_information_basic_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class JobInformationBasicPaged(Paged): + """ + A paging container for iterating over a list of :class:`JobInformationBasic ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[JobInformationBasic]'} + } + + def __init__(self, *args, **kwargs): + + super(JobInformationBasicPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_inner_error.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_inner_error.py new file mode 100644 index 00000000000..00b1e4eaf1c --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_inner_error.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobInnerError(Model): + """The Data Lake Analytics job error details. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar diagnostic_code: the diagnostic error code. + :vartype diagnostic_code: int + :ivar severity: the severity level of the failure. Possible values + include: 'Warning', 'Error', 'Info', 'SevereWarning', 'Deprecated', + 'UserWarning' + :vartype severity: str or :class:`SeverityTypes + ` + :ivar details: the details of the error message. + :vartype details: str + :ivar component: the component that failed. + :vartype component: str + :ivar error_id: the specific identifier for the type of error encountered + in the job. + :vartype error_id: str + :ivar help_link: the link to MSDN or Azure help for this type of error, if + any. + :vartype help_link: str + :ivar internal_diagnostics: the internal diagnostic stack trace if the + user requesting the job error details has sufficient permissions it will + be retrieved, otherwise it will be empty. + :vartype internal_diagnostics: str + :ivar message: the user friendly error message for the failure. 
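# Illustrative usage sketch (not part of the generated sources): msrest Paged
# containers such as JobInformationBasicPaged iterate item by item and fetch
# further pages on demand, so a list result can be consumed as a plain iterable.
# The client.job.list(...) call is assumed to be defined elsewhere in this
# vendored client; only the iteration pattern is shown here.
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.job.models import JobState


def accepted_job_names(client, account_name):
    """Collect the names of jobs currently in the Accepted state."""
    return [
        job.name
        for job in client.job.list(account_name)  # yields JobInformationBasic items
        if job.state is not None and JobState(job.state) == JobState.accepted
    ]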
+ :vartype message: str + :ivar resolution: the recommended resolution for the failure, if any. + :vartype resolution: str + :ivar source: the ultimate source of the failure (usually either SYSTEM or + USER). + :vartype source: str + :ivar description: the error message description + :vartype description: str + """ + + _validation = { + 'diagnostic_code': {'readonly': True}, + 'severity': {'readonly': True}, + 'details': {'readonly': True}, + 'component': {'readonly': True}, + 'error_id': {'readonly': True}, + 'help_link': {'readonly': True}, + 'internal_diagnostics': {'readonly': True}, + 'message': {'readonly': True}, + 'resolution': {'readonly': True}, + 'source': {'readonly': True}, + 'description': {'readonly': True}, + } + + _attribute_map = { + 'diagnostic_code': {'key': 'diagnosticCode', 'type': 'int'}, + 'severity': {'key': 'severity', 'type': 'SeverityTypes'}, + 'details': {'key': 'details', 'type': 'str'}, + 'component': {'key': 'component', 'type': 'str'}, + 'error_id': {'key': 'errorId', 'type': 'str'}, + 'help_link': {'key': 'helpLink', 'type': 'str'}, + 'internal_diagnostics': {'key': 'internalDiagnostics', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'resolution': {'key': 'resolution', 'type': 'str'}, + 'source': {'key': 'source', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self): + self.diagnostic_code = None + self.severity = None + self.details = None + self.component = None + self.error_id = None + self.help_link = None + self.internal_diagnostics = None + self.message = None + self.resolution = None + self.source = None + self.description = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_pipeline_information.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_pipeline_information.py new file mode 100644 index 00000000000..99a475912ae --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_pipeline_information.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobPipelineInformation(Model): + """Job Pipeline Information, showing the relationship of jobs and recurrences + of those jobs in a pipeline. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar pipeline_id: the job relationship pipeline identifier (a GUID). + :vartype pipeline_id: str + :ivar pipeline_name: the friendly name of the job relationship pipeline, + which does not need to be unique. + :vartype pipeline_name: str + :ivar pipeline_uri: the pipeline uri, unique, links to the originating + service for this pipeline. + :vartype pipeline_uri: str + :ivar num_jobs_failed: the number of jobs in this pipeline that have + failed. + :vartype num_jobs_failed: int + :ivar num_jobs_canceled: the number of jobs in this pipeline that have + been canceled. 
+ :vartype num_jobs_canceled: int + :ivar num_jobs_succeeded: the number of jobs in this pipeline that have + succeeded. + :vartype num_jobs_succeeded: int + :ivar au_hours_failed: the number of job execution hours that resulted in + failed jobs. + :vartype au_hours_failed: float + :ivar au_hours_canceled: the number of job execution hours that resulted + in canceled jobs. + :vartype au_hours_canceled: float + :ivar au_hours_succeeded: the number of job execution hours that resulted + in successful jobs. + :vartype au_hours_succeeded: float + :ivar last_submit_time: the last time a job in this pipeline was + submitted. + :vartype last_submit_time: datetime + :ivar runs: the list of recurrence identifiers representing each run of + this pipeline. + :vartype runs: list of :class:`JobPipelineRunInformation + ` + :ivar recurrences: the list of recurrence identifiers representing each + run of this pipeline. + :vartype recurrences: list of str + """ + + _validation = { + 'pipeline_id': {'readonly': True}, + 'pipeline_name': {'readonly': True, 'max_length': 260}, + 'pipeline_uri': {'readonly': True}, + 'num_jobs_failed': {'readonly': True}, + 'num_jobs_canceled': {'readonly': True}, + 'num_jobs_succeeded': {'readonly': True}, + 'au_hours_failed': {'readonly': True}, + 'au_hours_canceled': {'readonly': True}, + 'au_hours_succeeded': {'readonly': True}, + 'last_submit_time': {'readonly': True}, + 'runs': {'readonly': True}, + 'recurrences': {'readonly': True}, + } + + _attribute_map = { + 'pipeline_id': {'key': 'pipelineId', 'type': 'str'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_uri': {'key': 'pipelineUri', 'type': 'str'}, + 'num_jobs_failed': {'key': 'numJobsFailed', 'type': 'int'}, + 'num_jobs_canceled': {'key': 'numJobsCanceled', 'type': 'int'}, + 'num_jobs_succeeded': {'key': 'numJobsSucceeded', 'type': 'int'}, + 'au_hours_failed': {'key': 'auHoursFailed', 'type': 'float'}, + 'au_hours_canceled': {'key': 'auHoursCanceled', 'type': 'float'}, + 'au_hours_succeeded': {'key': 'auHoursSucceeded', 'type': 'float'}, + 'last_submit_time': {'key': 'lastSubmitTime', 'type': 'iso-8601'}, + 'runs': {'key': 'runs', 'type': '[JobPipelineRunInformation]'}, + 'recurrences': {'key': 'recurrences', 'type': '[str]'}, + } + + def __init__(self): + self.pipeline_id = None + self.pipeline_name = None + self.pipeline_uri = None + self.num_jobs_failed = None + self.num_jobs_canceled = None + self.num_jobs_succeeded = None + self.au_hours_failed = None + self.au_hours_canceled = None + self.au_hours_succeeded = None + self.last_submit_time = None + self.runs = None + self.recurrences = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_pipeline_information_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_pipeline_information_paged.py new file mode 100644 index 00000000000..c33529c88e6 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_pipeline_information_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class JobPipelineInformationPaged(Paged): + """ + A paging container for iterating over a list of :class:`JobPipelineInformation ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[JobPipelineInformation]'} + } + + def __init__(self, *args, **kwargs): + + super(JobPipelineInformationPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_pipeline_run_information.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_pipeline_run_information.py new file mode 100644 index 00000000000..ff829170c6e --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_pipeline_run_information.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobPipelineRunInformation(Model): + """Run info for a specific job pipeline. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar run_id: the run identifier of an instance of pipeline executions (a + GUID). + :vartype run_id: str + :ivar last_submit_time: the time this instance was last submitted. + :vartype last_submit_time: datetime + """ + + _validation = { + 'run_id': {'readonly': True}, + 'last_submit_time': {'readonly': True}, + } + + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, + 'last_submit_time': {'key': 'lastSubmitTime', 'type': 'iso-8601'}, + } + + def __init__(self): + self.run_id = None + self.last_submit_time = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_properties.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_properties.py new file mode 100644 index 00000000000..54ecbbff64a --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_properties.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobProperties(Model): + """The common Data Lake Analytics job properties. + + :param runtime_version: the runtime version of the Data Lake Analytics + engine to use for the specific type of job being run. 
+ :type runtime_version: str + :param script: the script to run + :type script: str + :param type: Polymorphic Discriminator + :type type: str + """ + + _validation = { + 'script': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + 'script': {'key': 'script', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'USql': 'USqlJobProperties', 'Hive': 'HiveJobProperties'} + } + + def __init__(self, script, runtime_version=None): + self.runtime_version = runtime_version + self.script = script + self.type = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_recurrence_information.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_recurrence_information.py new file mode 100644 index 00000000000..b37a2c98a00 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_recurrence_information.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobRecurrenceInformation(Model): + """Recurrence job information for a specific recurrence. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar recurrence_id: the recurrence identifier (a GUID), unique per + activity/script, regardless of iterations. This is something to link + different occurrences of the same job together. + :vartype recurrence_id: str + :ivar recurrence_name: the recurrence name, user friendly name for the + correlation between jobs. + :vartype recurrence_name: str + :ivar num_jobs_failed: the number of jobs in this recurrence that have + failed. + :vartype num_jobs_failed: int + :ivar num_jobs_canceled: the number of jobs in this recurrence that have + been canceled. + :vartype num_jobs_canceled: int + :ivar num_jobs_succeeded: the number of jobs in this recurrence that have + succeeded. + :vartype num_jobs_succeeded: int + :ivar au_hours_failed: the number of job execution hours that resulted in + failed jobs. + :vartype au_hours_failed: float + :ivar au_hours_canceled: the number of job execution hours that resulted + in canceled jobs. + :vartype au_hours_canceled: float + :ivar au_hours_succeeded: the number of job execution hours that resulted + in successful jobs. + :vartype au_hours_succeeded: float + :ivar last_submit_time: the last time a job in this recurrence was + submitted. 
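# Illustrative usage sketch (not part of the generated sources): the _subtype_map
# above lets msrest dispatch on the 'type' discriminator, so each concrete
# subclass only sets self.type. This assumes msrest >= 0.6 (where Model.serialize
# is available) and that the models package re-exports USqlJobProperties, which
# is defined further down in this change.
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.job.models import (
    USqlJobProperties)

props = USqlJobProperties(script='@rows = SELECT * FROM dbo.Input; OUTPUT @rows TO "/out.csv" USING Outputters.Csv();')
body = props.serialize()        # e.g. {'script': '...', 'type': 'USql'}
assert body['type'] == 'USql'   # the discriminator comes from the subclass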
+ :vartype last_submit_time: datetime + """ + + _validation = { + 'recurrence_id': {'readonly': True}, + 'recurrence_name': {'readonly': True}, + 'num_jobs_failed': {'readonly': True}, + 'num_jobs_canceled': {'readonly': True}, + 'num_jobs_succeeded': {'readonly': True}, + 'au_hours_failed': {'readonly': True}, + 'au_hours_canceled': {'readonly': True}, + 'au_hours_succeeded': {'readonly': True}, + 'last_submit_time': {'readonly': True}, + } + + _attribute_map = { + 'recurrence_id': {'key': 'recurrenceId', 'type': 'str'}, + 'recurrence_name': {'key': 'recurrenceName', 'type': 'str'}, + 'num_jobs_failed': {'key': 'numJobsFailed', 'type': 'int'}, + 'num_jobs_canceled': {'key': 'numJobsCanceled', 'type': 'int'}, + 'num_jobs_succeeded': {'key': 'numJobsSucceeded', 'type': 'int'}, + 'au_hours_failed': {'key': 'auHoursFailed', 'type': 'float'}, + 'au_hours_canceled': {'key': 'auHoursCanceled', 'type': 'float'}, + 'au_hours_succeeded': {'key': 'auHoursSucceeded', 'type': 'float'}, + 'last_submit_time': {'key': 'lastSubmitTime', 'type': 'iso-8601'}, + } + + def __init__(self): + self.recurrence_id = None + self.recurrence_name = None + self.num_jobs_failed = None + self.num_jobs_canceled = None + self.num_jobs_succeeded = None + self.au_hours_failed = None + self.au_hours_canceled = None + self.au_hours_succeeded = None + self.last_submit_time = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_recurrence_information_paged.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_recurrence_information_paged.py new file mode 100644 index 00000000000..ead66eb0c35 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_recurrence_information_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class JobRecurrenceInformationPaged(Paged): + """ + A paging container for iterating over a list of :class:`JobRecurrenceInformation ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[JobRecurrenceInformation]'} + } + + def __init__(self, *args, **kwargs): + + super(JobRecurrenceInformationPaged, self).__init__(*args, **kwargs) diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_relationship_properties.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_relationship_properties.py new file mode 100644 index 00000000000..df704d923be --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_relationship_properties.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobRelationshipProperties(Model): + """Job relationship information properties including pipeline information, + correlation information, etc. + + :param pipeline_id: the job relationship pipeline identifier (a GUID). + :type pipeline_id: str + :param pipeline_name: the friendly name of the job relationship pipeline, + which does not need to be unique. + :type pipeline_name: str + :param pipeline_uri: the pipeline uri, unique, links to the originating + service for this pipeline. + :type pipeline_uri: str + :param run_id: the run identifier (a GUID), unique identifier of the + iteration of this pipeline. + :type run_id: str + :param recurrence_id: the recurrence identifier (a GUID), unique per + activity/script, regardless of iterations. This is something to link + different occurrences of the same job together. + :type recurrence_id: str + :param recurrence_name: the recurrence name, user friendly name for the + correlation between jobs. + :type recurrence_name: str + """ + + _validation = { + 'pipeline_name': {'max_length': 260}, + 'recurrence_id': {'required': True}, + 'recurrence_name': {'max_length': 260}, + } + + _attribute_map = { + 'pipeline_id': {'key': 'pipelineId', 'type': 'str'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_uri': {'key': 'pipelineUri', 'type': 'str'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'recurrence_id': {'key': 'recurrenceId', 'type': 'str'}, + 'recurrence_name': {'key': 'recurrenceName', 'type': 'str'}, + } + + def __init__(self, recurrence_id, pipeline_id=None, pipeline_name=None, pipeline_uri=None, run_id=None, recurrence_name=None): + self.pipeline_id = pipeline_id + self.pipeline_name = pipeline_name + self.pipeline_uri = pipeline_uri + self.run_id = run_id + self.recurrence_id = recurrence_id + self.recurrence_name = recurrence_name diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_resource.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_resource.py new file mode 100644 index 00000000000..fd433c5aa75 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_resource.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobResource(Model): + """The Data Lake Analytics job resources. + + :param name: the name of the resource. + :type name: str + :param resource_path: the path to the resource. + :type resource_path: str + :param type: the job resource type. 
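# Illustrative usage sketch (not part of the generated sources): building the
# relationship metadata that ties recurring submissions of the same script
# together. Only recurrence_id is required by the validation table above; the
# GUIDs and pipeline name here are made up for the example.
import uuid

from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.job.models import (
    JobRelationshipProperties)

related = JobRelationshipProperties(
    recurrence_id=str(uuid.uuid4()),       # normally reused across runs of the same script
    run_id=str(uuid.uuid4()),              # unique per submission
    pipeline_name='nightly-usage-rollup',  # must stay within the 260-character limit
)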
Possible values include: + 'VertexResource', 'JobManagerResource', 'StatisticsResource', + 'VertexResourceInUserFolder', 'JobManagerResourceInUserFolder', + 'StatisticsResourceInUserFolder' + :type type: str or :class:`JobResourceType + ` + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'resource_path': {'key': 'resourcePath', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'JobResourceType'}, + } + + def __init__(self, name=None, resource_path=None, type=None): + self.name = name + self.resource_path = resource_path + self.type = type diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_state_audit_record.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_state_audit_record.py new file mode 100644 index 00000000000..50c488141fe --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_state_audit_record.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobStateAuditRecord(Model): + """The Data Lake Analytics job state audit records for tracking the lifecycle + of a job. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar new_state: the new state the job is in. + :vartype new_state: str + :ivar time_stamp: the time stamp that the state change took place. + :vartype time_stamp: datetime + :ivar requested_by_user: the user who requests the change. + :vartype requested_by_user: str + :ivar details: the details of the audit log. + :vartype details: str + """ + + _validation = { + 'new_state': {'readonly': True}, + 'time_stamp': {'readonly': True}, + 'requested_by_user': {'readonly': True}, + 'details': {'readonly': True}, + } + + _attribute_map = { + 'new_state': {'key': 'newState', 'type': 'str'}, + 'time_stamp': {'key': 'timeStamp', 'type': 'iso-8601'}, + 'requested_by_user': {'key': 'requestedByUser', 'type': 'str'}, + 'details': {'key': 'details', 'type': 'str'}, + } + + def __init__(self): + self.new_state = None + self.time_stamp = None + self.requested_by_user = None + self.details = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_statistics.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_statistics.py new file mode 100644 index 00000000000..ff8bd6e0317 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_statistics.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobStatistics(Model): + """The Data Lake Analytics job execution statistics. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar last_update_time_utc: the last update time for the statistics. + :vartype last_update_time_utc: datetime + :ivar finalizing_time_utc: the job finalizing start time. + :vartype finalizing_time_utc: datetime + :ivar stages: the list of stages for the job. + :vartype stages: list of :class:`JobStatisticsVertexStage + ` + """ + + _validation = { + 'last_update_time_utc': {'readonly': True}, + 'finalizing_time_utc': {'readonly': True}, + 'stages': {'readonly': True}, + } + + _attribute_map = { + 'last_update_time_utc': {'key': 'lastUpdateTimeUtc', 'type': 'iso-8601'}, + 'finalizing_time_utc': {'key': 'finalizingTimeUtc', 'type': 'iso-8601'}, + 'stages': {'key': 'stages', 'type': '[JobStatisticsVertexStage]'}, + } + + def __init__(self): + self.last_update_time_utc = None + self.finalizing_time_utc = None + self.stages = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_statistics_vertex_stage.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_statistics_vertex_stage.py new file mode 100644 index 00000000000..1cb065a4dcf --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/job_statistics_vertex_stage.py @@ -0,0 +1,139 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobStatisticsVertexStage(Model): + """The Data Lake Analytics job statistics vertex stage information. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar data_read: the amount of data read, in bytes. + :vartype data_read: long + :ivar data_read_cross_pod: the amount of data read across multiple pods, + in bytes. + :vartype data_read_cross_pod: long + :ivar data_read_intra_pod: the amount of data read in one pod, in bytes. + :vartype data_read_intra_pod: long + :ivar data_to_read: the amount of data remaining to be read, in bytes. + :vartype data_to_read: long + :ivar data_written: the amount of data written, in bytes. + :vartype data_written: long + :ivar duplicate_discard_count: the number of duplicates that were + discarded. + :vartype duplicate_discard_count: int + :ivar failed_count: the number of failures that occured in this stage. + :vartype failed_count: int + :ivar max_vertex_data_read: the maximum amount of data read in a single + vertex, in bytes. + :vartype max_vertex_data_read: long + :ivar min_vertex_data_read: the minimum amount of data read in a single + vertex, in bytes. + :vartype min_vertex_data_read: long + :ivar read_failure_count: the number of read failures in this stage. 
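# Illustrative usage sketch (not part of the generated sources): walking the
# read-only statistics returned for a job. The attribute names (stages,
# stage_name, total_progress, succeeded_count, failed_count) all come from the
# JobStatistics and JobStatisticsVertexStage models in this change.
def print_stage_progress(stats):
    """Print a one-line summary per vertex stage of a JobStatistics object."""
    for stage in stats.stages or []:
        print('{0}: {1}% complete, {2} succeeded / {3} failed vertices'.format(
            stage.stage_name, stage.total_progress,
            stage.succeeded_count, stage.failed_count))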
+ :vartype read_failure_count: int + :ivar revocation_count: the number of vertices that were revoked during + this stage. + :vartype revocation_count: int + :ivar running_count: the number of currently running vertices in this + stage. + :vartype running_count: int + :ivar scheduled_count: the number of currently scheduled vertices in this + stage + :vartype scheduled_count: int + :ivar stage_name: the name of this stage in job execution. + :vartype stage_name: str + :ivar succeeded_count: the number of vertices that succeeded in this + stage. + :vartype succeeded_count: int + :ivar temp_data_written: the amount of temporary data written, in bytes. + :vartype temp_data_written: long + :ivar total_count: the total vertex count for this stage. + :vartype total_count: int + :ivar total_failed_time: the amount of time that failed vertices took up + in this stage. + :vartype total_failed_time: timedelta + :ivar total_progress: the current progress of this stage, as a percentage. + :vartype total_progress: int + :ivar total_succeeded_time: the amount of time all successful vertices + took in this stage. + :vartype total_succeeded_time: timedelta + """ + + _validation = { + 'data_read': {'readonly': True}, + 'data_read_cross_pod': {'readonly': True}, + 'data_read_intra_pod': {'readonly': True}, + 'data_to_read': {'readonly': True}, + 'data_written': {'readonly': True}, + 'duplicate_discard_count': {'readonly': True}, + 'failed_count': {'readonly': True}, + 'max_vertex_data_read': {'readonly': True}, + 'min_vertex_data_read': {'readonly': True}, + 'read_failure_count': {'readonly': True}, + 'revocation_count': {'readonly': True}, + 'running_count': {'readonly': True}, + 'scheduled_count': {'readonly': True}, + 'stage_name': {'readonly': True}, + 'succeeded_count': {'readonly': True}, + 'temp_data_written': {'readonly': True}, + 'total_count': {'readonly': True}, + 'total_failed_time': {'readonly': True}, + 'total_progress': {'readonly': True}, + 'total_succeeded_time': {'readonly': True}, + } + + _attribute_map = { + 'data_read': {'key': 'dataRead', 'type': 'long'}, + 'data_read_cross_pod': {'key': 'dataReadCrossPod', 'type': 'long'}, + 'data_read_intra_pod': {'key': 'dataReadIntraPod', 'type': 'long'}, + 'data_to_read': {'key': 'dataToRead', 'type': 'long'}, + 'data_written': {'key': 'dataWritten', 'type': 'long'}, + 'duplicate_discard_count': {'key': 'duplicateDiscardCount', 'type': 'int'}, + 'failed_count': {'key': 'failedCount', 'type': 'int'}, + 'max_vertex_data_read': {'key': 'maxVertexDataRead', 'type': 'long'}, + 'min_vertex_data_read': {'key': 'minVertexDataRead', 'type': 'long'}, + 'read_failure_count': {'key': 'readFailureCount', 'type': 'int'}, + 'revocation_count': {'key': 'revocationCount', 'type': 'int'}, + 'running_count': {'key': 'runningCount', 'type': 'int'}, + 'scheduled_count': {'key': 'scheduledCount', 'type': 'int'}, + 'stage_name': {'key': 'stageName', 'type': 'str'}, + 'succeeded_count': {'key': 'succeededCount', 'type': 'int'}, + 'temp_data_written': {'key': 'tempDataWritten', 'type': 'long'}, + 'total_count': {'key': 'totalCount', 'type': 'int'}, + 'total_failed_time': {'key': 'totalFailedTime', 'type': 'duration'}, + 'total_progress': {'key': 'totalProgress', 'type': 'int'}, + 'total_succeeded_time': {'key': 'totalSucceededTime', 'type': 'duration'}, + } + + def __init__(self): + self.data_read = None + self.data_read_cross_pod = None + self.data_read_intra_pod = None + self.data_to_read = None + self.data_written = None + self.duplicate_discard_count = None + 
self.failed_count = None + self.max_vertex_data_read = None + self.min_vertex_data_read = None + self.read_failure_count = None + self.revocation_count = None + self.running_count = None + self.scheduled_count = None + self.stage_name = None + self.succeeded_count = None + self.temp_data_written = None + self.total_count = None + self.total_failed_time = None + self.total_progress = None + self.total_succeeded_time = None diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/usql_job_properties.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/usql_job_properties.py new file mode 100644 index 00000000000..0fc94bebc07 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/models/usql_job_properties.py @@ -0,0 +1,126 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .job_properties import JobProperties + + +class USqlJobProperties(JobProperties): + """U-SQL job properties used when retrieving U-SQL jobs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param runtime_version: the runtime version of the Data Lake Analytics + engine to use for the specific type of job being run. + :type runtime_version: str + :param script: the script to run + :type script: str + :param type: Polymorphic Discriminator + :type type: str + :ivar resources: the list of resources that are required by the job + :vartype resources: list of :class:`JobResource + ` + :ivar statistics: the job specific statistics. + :vartype statistics: :class:`JobStatistics + ` + :ivar debug_data: the job specific debug data locations. + :vartype debug_data: :class:`JobDataPath + ` + :ivar diagnostics: the diagnostics for the job. + :vartype diagnostics: list of :class:`Diagnostics + ` + :ivar algebra_file_path: the algebra file path after the job has completed + :vartype algebra_file_path: str + :ivar total_compilation_time: the total time this job spent compiling. + This value should not be set by the user and will be ignored if it is. + :vartype total_compilation_time: timedelta + :ivar total_pause_time: the total time this job spent paused. This value + should not be set by the user and will be ignored if it is. + :vartype total_pause_time: timedelta + :ivar total_queued_time: the total time this job spent queued. This value + should not be set by the user and will be ignored if it is. + :vartype total_queued_time: timedelta + :ivar total_running_time: the total time this job spent executing. This + value should not be set by the user and will be ignored if it is. + :vartype total_running_time: timedelta + :ivar root_process_node_id: the ID used to identify the job manager + coordinating job execution. This value should not be set by the user and + will be ignored if it is. + :vartype root_process_node_id: str + :ivar yarn_application_id: the ID used to identify the yarn application + executing the job. This value should not be set by the user and will be + ignored if it is. 
+ :vartype yarn_application_id: str + :ivar yarn_application_time_stamp: the timestamp (in ticks) for the yarn + application executing the job. This value should not be set by the user + and will be ignored if it is. + :vartype yarn_application_time_stamp: long + :ivar compile_mode: the specific compilation mode for the job used during + execution. If this is not specified during submission, the server will + determine the optimal compilation mode. Possible values include: + 'Semantic', 'Full', 'SingleBox' + :vartype compile_mode: str or :class:`CompileMode + ` + """ + + _validation = { + 'script': {'required': True}, + 'type': {'required': True}, + 'resources': {'readonly': True}, + 'statistics': {'readonly': True}, + 'debug_data': {'readonly': True}, + 'diagnostics': {'readonly': True}, + 'algebra_file_path': {'readonly': True}, + 'total_compilation_time': {'readonly': True}, + 'total_pause_time': {'readonly': True}, + 'total_queued_time': {'readonly': True}, + 'total_running_time': {'readonly': True}, + 'root_process_node_id': {'readonly': True}, + 'yarn_application_id': {'readonly': True}, + 'yarn_application_time_stamp': {'readonly': True}, + 'compile_mode': {'readonly': True}, + } + + _attribute_map = { + 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + 'script': {'key': 'script', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'resources': {'key': 'resources', 'type': '[JobResource]'}, + 'statistics': {'key': 'statistics', 'type': 'JobStatistics'}, + 'debug_data': {'key': 'debugData', 'type': 'JobDataPath'}, + 'diagnostics': {'key': 'diagnostics', 'type': '[Diagnostics]'}, + 'algebra_file_path': {'key': 'algebraFilePath', 'type': 'str'}, + 'total_compilation_time': {'key': 'totalCompilationTime', 'type': 'duration'}, + 'total_pause_time': {'key': 'totalPauseTime', 'type': 'duration'}, + 'total_queued_time': {'key': 'totalQueuedTime', 'type': 'duration'}, + 'total_running_time': {'key': 'totalRunningTime', 'type': 'duration'}, + 'root_process_node_id': {'key': 'rootProcessNodeId', 'type': 'str'}, + 'yarn_application_id': {'key': 'yarnApplicationId', 'type': 'str'}, + 'yarn_application_time_stamp': {'key': 'yarnApplicationTimeStamp', 'type': 'long'}, + 'compile_mode': {'key': 'compileMode', 'type': 'CompileMode'}, + } + + def __init__(self, script, runtime_version=None): + super(USqlJobProperties, self).__init__(runtime_version=runtime_version, script=script) + self.resources = None + self.statistics = None + self.debug_data = None + self.diagnostics = None + self.algebra_file_path = None + self.total_compilation_time = None + self.total_pause_time = None + self.total_queued_time = None + self.total_running_time = None + self.root_process_node_id = None + self.yarn_application_id = None + self.yarn_application_time_stamp = None + self.compile_mode = None + self.type = 'USql' diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/__init__.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/__init__.py new file mode 100644 index 00000000000..828bb030573 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/__init__.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .pipeline_operations import PipelineOperations +from .recurrence_operations import RecurrenceOperations +from .job_operations import JobOperations + +__all__ = [ + 'PipelineOperations', + 'RecurrenceOperations', + 'JobOperations', +] diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/job_operations.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/job_operations.py new file mode 100644 index 00000000000..784575a75aa --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/job_operations.py @@ -0,0 +1,538 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class JobOperations(object): + """JobOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An objec model deserializer. + :ivar api_version: Client Api Version. Constant value: "2016-11-01". + """ + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2016-11-01" + + self.config = config + + def get_statistics( + self, account_name, job_identity, custom_headers=None, raw=False, **operation_config): + """Gets statistics of the specified job. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param job_identity: Job Information ID. + :type job_identity: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`JobStatistics + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`JobStatistics + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/Jobs/{jobIdentity}/GetStatistics' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True), + 'jobIdentity': self._serialize.url("job_identity", job_identity, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('JobStatistics', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def get_debug_data_path( + self, account_name, job_identity, custom_headers=None, raw=False, **operation_config): + """Gets the job debug data information specified by the job ID. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param job_identity: JobInfo ID. + :type job_identity: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`JobDataPath + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`JobDataPath + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/Jobs/{jobIdentity}/GetDebugDataPath' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True), + 'jobIdentity': self._serialize.url("job_identity", job_identity, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('JobDataPath', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def build( + self, account_name, parameters, custom_headers=None, raw=False, **operation_config): + """Builds (compiles) the specified job in the specified Data Lake + Analytics account for job correctness and validation. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param parameters: The parameters to build a job. + :type parameters: :class:`BuildJobParameters + ` + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`JobInformation + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`JobInformation + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/BuildJob' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'BuildJobParameters') + + # Construct and send request + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('JobInformation', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def cancel( + self, account_name, job_identity, custom_headers=None, raw=False, **operation_config): + """Cancels the running job specified by the job ID. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param job_identity: JobInfo ID to cancel. + :type job_identity: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/Jobs/{jobIdentity}/CancelJob' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True), + 'jobIdentity': self._serialize.url("job_identity", job_identity, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def create( + self, account_name, job_identity, parameters, custom_headers=None, raw=False, **operation_config): + """Submits a job to the specified Data Lake Analytics account. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param job_identity: The job ID (a GUID) for the job being submitted. + :type job_identity: str + :param parameters: The parameters to submit a job. + :type parameters: :class:`CreateJobParameters + ` + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`JobInformation + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`JobInformation + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/Jobs/{jobIdentity}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True), + 'jobIdentity': self._serialize.url("job_identity", job_identity, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'CreateJobParameters') + + # Construct and send request + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('JobInformation', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def get( + self, account_name, job_identity, custom_headers=None, raw=False, **operation_config): + """Gets the job information for the specified job ID. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param job_identity: JobInfo ID. + :type job_identity: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`JobInformation + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`JobInformation + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/Jobs/{jobIdentity}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True), + 'jobIdentity': self._serialize.url("job_identity", job_identity, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('JobInformation', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def list( + self, account_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): + """Lists the jobs, if any, associated with the specified Data Lake + Analytics account. The response includes a link to the next page of + results, if any. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param filter: OData filter. Optional. + :type filter: str + :param top: The number of items to return. Optional. + :type top: int + :param skip: The number of items to skip over before returning + elements. Optional. + :type skip: int + :param select: OData Select statement. Limits the properties on each + entry to just those requested, e.g. + Categories?$select=CategoryName,Description. Optional. + :type select: str + :param orderby: OrderBy clause. One or more comma-separated + expressions with an optional "asc" (the default) or "desc" depending + on the order you'd like the values sorted, e.g. + Categories?$orderby=CategoryName desc. Optional. + :type orderby: str + :param count: The Boolean value of true or false to request a count of + the matching resources included with the resources in the response, + e.g. Categories?$count=true. Optional. + :type count: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of :class:`JobInformationBasic + ` + :rtype: :class:`JobInformationBasicPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/Jobs' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=1) + if select is not None: + query_parameters['$select'] = self._serialize.query("select", select, 'str') + if orderby is not None: + query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str') + if count is not None: + query_parameters['$count'] = self._serialize.query("count", count, 'bool') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.JobInformationBasicPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.JobInformationBasicPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/pipeline_operations.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/pipeline_operations.py new file mode 100644 index 00000000000..2d483b9d84c --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/pipeline_operations.py @@ -0,0 +1,195 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class PipelineOperations(object): + """PipelineOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An objec model deserializer. + :ivar api_version: Client Api Version. Constant value: "2016-11-01". + """ + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2016-11-01" + + self.config = config + + def list( + self, account_name, start_date_time=None, end_date_time=None, custom_headers=None, raw=False, **operation_config): + """Lists all pipelines. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param start_date_time: The start date for when to get the list of + pipelines. The startDateTime and endDateTime can be no more than 30 + days apart. + :type start_date_time: datetime + :param end_date_time: The end date for when to get the list of + pipelines. The startDateTime and endDateTime can be no more than 30 + days apart. + :type end_date_time: datetime + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of :class:`JobPipelineInformation + ` + :rtype: :class:`JobPipelineInformationPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/pipelines' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if start_date_time is not None: + query_parameters['startDateTime'] = self._serialize.query("start_date_time", start_date_time, 'iso-8601') + if end_date_time is not None: + query_parameters['endDateTime'] = self._serialize.query("end_date_time", end_date_time, 'iso-8601') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize 
response + deserialized = models.JobPipelineInformationPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.JobPipelineInformationPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get( + self, account_name, pipeline_identity, start_date_time=None, end_date_time=None, custom_headers=None, raw=False, **operation_config): + """Gets the Pipeline information for the specified pipeline ID. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param pipeline_identity: Pipeline ID. + :type pipeline_identity: str + :param start_date_time: The start date for when to get the pipeline + and aggregate its data. The startDateTime and endDateTime can be no + more than 30 days apart. + :type start_date_time: datetime + :param end_date_time: The end date for when to get the pipeline and + aggregate its data. The startDateTime and endDateTime can be no more + than 30 days apart. + :type end_date_time: datetime + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: :class:`JobPipelineInformation + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`JobPipelineInformation + ` or + :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/pipelines/{pipelineIdentity}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True), + 'pipelineIdentity': self._serialize.url("pipeline_identity", pipeline_identity, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if start_date_time is not None: + query_parameters['startDateTime'] = self._serialize.query("start_date_time", start_date_time, 'iso-8601') + if end_date_time is not None: + query_parameters['endDateTime'] = self._serialize.query("end_date_time", end_date_time, 'iso-8601') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('JobPipelineInformation', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized diff --git 
a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/recurrence_operations.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/recurrence_operations.py new file mode 100644 index 00000000000..c9d3641f372 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/operations/recurrence_operations.py @@ -0,0 +1,195 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class RecurrenceOperations(object): + """RecurrenceOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An objec model deserializer. + :ivar api_version: Client Api Version. Constant value: "2016-11-01". + """ + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2016-11-01" + + self.config = config + + def list( + self, account_name, start_date_time=None, end_date_time=None, custom_headers=None, raw=False, **operation_config): + """Lists all recurrences. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param start_date_time: The start date for when to get the list of + recurrences. The startDateTime and endDateTime can be no more than 30 + days apart. + :type start_date_time: datetime + :param end_date_time: The end date for when to get the list of + recurrences. The startDateTime and endDateTime can be no more than 30 + days apart. + :type end_date_time: datetime + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of :class:`JobRecurrenceInformation + ` + :rtype: :class:`JobRecurrenceInformationPaged + ` + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = '/recurrences' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if start_date_time is not None: + query_parameters['startDateTime'] = self._serialize.query("start_date_time", start_date_time, 'iso-8601') + if end_date_time is not None: + query_parameters['endDateTime'] = self._serialize.query("end_date_time", end_date_time, 'iso-8601') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.JobRecurrenceInformationPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.JobRecurrenceInformationPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + + def get( + self, account_name, recurrence_identity, start_date_time=None, end_date_time=None, custom_headers=None, raw=False, **operation_config): + """Gets the recurrence information for the specified recurrence ID. + + :param account_name: The Azure Data Lake Analytics account to execute + job operations on. + :type account_name: str + :param recurrence_identity: Recurrence ID. + :type recurrence_identity: str + :param start_date_time: The start date for when to get the recurrence + and aggregate its data. The startDateTime and endDateTime can be no + more than 30 days apart. + :type start_date_time: datetime + :param end_date_time: The end date for when to get recurrence and + aggregate its data. The startDateTime and endDateTime can be no more + than 30 days apart. + :type end_date_time: datetime + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`JobRecurrenceInformation + ` + or :class:`ClientRawResponse` if + raw=true + :rtype: :class:`JobRecurrenceInformation + ` + or :class:`ClientRawResponse` + :raises: :class:`CloudError` + """ + # Construct URL + url = '/recurrences/{recurrenceIdentity}' + path_format_arguments = { + 'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True), + 'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True), + 'recurrenceIdentity': self._serialize.url("recurrence_identity", recurrence_identity, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + if start_date_time is not None: + query_parameters['startDateTime'] = self._serialize.query("start_date_time", start_date_time, 'iso-8601') + if end_date_time is not None: + query_parameters['endDateTime'] = self._serialize.query("end_date_time", end_date_time, 'iso-8601') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('JobRecurrenceInformation', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/version.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/version.py new file mode 100644 index 00000000000..9a6b4374370 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/job/version.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +VERSION = "0.1.6" + diff --git a/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/version.py b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/version.py new file mode 100644 index 00000000000..b4bdc86e2d5 --- /dev/null +++ b/src/azure-cli/azure/cli/command_modules/dla/vendored_sdks/azure_mgmt_datalake_analytics/version.py @@ -0,0 +1,8 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +VERSION = "0.2.1" diff --git a/src/azure-cli/requirements.py3.Darwin.txt b/src/azure-cli/requirements.py3.Darwin.txt index 4070e7836b7..2a203ead9fe 100644 --- a/src/azure-cli/requirements.py3.Darwin.txt +++ b/src/azure-cli/requirements.py3.Darwin.txt @@ -39,7 +39,6 @@ azure-mgmt-containerservice==25.0.0 azure-mgmt-core==1.3.2 azure-mgmt-cosmosdb==9.2.0 azure-mgmt-databoxedge==1.0.0 -azure-mgmt-datalake-analytics==0.2.1 azure-mgmt-datalake-nspkg==3.0.1 azure-mgmt-datalake-store==0.5.0 azure-mgmt-datamigration==10.0.0 diff --git a/src/azure-cli/requirements.py3.Linux.txt b/src/azure-cli/requirements.py3.Linux.txt index 4511f60dbd8..345f17add05 100644 --- a/src/azure-cli/requirements.py3.Linux.txt +++ b/src/azure-cli/requirements.py3.Linux.txt @@ -39,7 +39,6 @@ azure-mgmt-containerservice==25.0.0 azure-mgmt-core==1.3.2 azure-mgmt-cosmosdb==9.2.0 azure-mgmt-databoxedge==1.0.0 -azure-mgmt-datalake-analytics==0.2.1 azure-mgmt-datalake-nspkg==3.0.1 azure-mgmt-datalake-store==0.5.0 azure-mgmt-datamigration==10.0.0 diff --git a/src/azure-cli/requirements.py3.windows.txt b/src/azure-cli/requirements.py3.windows.txt index 66aca98ac56..7df5515e841 100644 --- a/src/azure-cli/requirements.py3.windows.txt +++ b/src/azure-cli/requirements.py3.windows.txt @@ -39,7 +39,6 @@ azure-mgmt-containerservice==25.0.0 azure-mgmt-core==1.3.2 azure-mgmt-cosmosdb==9.2.0 azure-mgmt-databoxedge==1.0.0 -azure-mgmt-datalake-analytics==0.2.1 azure-mgmt-datalake-nspkg==3.0.1 azure-mgmt-datalake-store==0.5.0 azure-mgmt-datamigration==10.0.0 diff --git a/src/azure-cli/setup.py b/src/azure-cli/setup.py index cc021bbca49..506ed8f313d 100644 --- a/src/azure-cli/setup.py +++ b/src/azure-cli/setup.py @@ -82,7 +82,6 @@ 'azure-mgmt-containerservice~=25.0.0', 'azure-mgmt-cosmosdb==9.2.0', 'azure-mgmt-databoxedge~=1.0.0', - 'azure-mgmt-datalake-analytics~=0.2.1', 'azure-mgmt-datalake-store~=0.5.0', 'azure-mgmt-datamigration~=10.0.0', 'azure-mgmt-devtestlabs~=4.0',
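
Reviewer note: the hunks above add a complete msrest-generated job client under the dla module's vendored_sdks package. As a quick orientation, here is a minimal sketch of how the JobOperations methods in this diff are typically driven. It is illustrative only: the credentials, account name, job GUID, and the (credentials, adla_job_dns_suffix) constructor shape are assumptions for the sketch, not something asserted by the diff itself.

# Hedged sketch: placeholder credentials/account/job values; the client constructor
# arguments are assumed to follow the usual azure-mgmt-datalake-analytics 0.2.x shape.
from msrestazure.azure_active_directory import ServicePrincipalCredentials
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.job import (
    DataLakeAnalyticsJobManagementClient,
)

credentials = ServicePrincipalCredentials(client_id='<app-id>', secret='<secret>', tenant='<tenant-id>')
job_client = DataLakeAnalyticsJobManagementClient(credentials, 'azuredatalakeanalytics.net')

account = 'myadlaaccount'
job_id = '00000000-0000-0000-0000-000000000000'   # placeholder job GUID

info = job_client.job.get(account, job_id)                    # GET  /Jobs/{jobIdentity}
stats = job_client.job.get_statistics(account, job_id)        # GET  /Jobs/{jobIdentity}/GetStatistics
debug = job_client.job.get_debug_data_path(account, job_id)   # GET  /Jobs/{jobIdentity}/GetDebugDataPath
job_client.job.cancel(account, job_id)                        # POST /Jobs/{jobIdentity}/CancelJob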
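
Submission goes through create (PUT /Jobs/{jobIdentity}) with a CreateJobParameters body, and build (POST /BuildJob) takes a BuildJobParameters body for compile-only validation. The create-side models are not part of this hunk, so the CreateJobParameters and CreateUSqlJobProperties constructor arguments below are assumptions based on the 0.2.x SDK; treat this as a sketch, not a reference.

import uuid

# Assumed model shapes (not shown in this diff): CreateUSqlJobProperties(script=...),
# CreateJobParameters(type=..., properties=..., name=..., degree_of_parallelism=...).
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.job.models import (
    CreateJobParameters,
    CreateUSqlJobProperties,
)

props = CreateUSqlJobProperties(
    script='@a = SELECT * FROM (VALUES (1)) AS T(x); OUTPUT @a TO "/out.csv" USING Outputters.Csv();')
params = CreateJobParameters(type='USql', properties=props,
                             name='review-smoke-test', degree_of_parallelism=1)

submitted = job_client.job.create(account, str(uuid.uuid4()), params)   # PUT /Jobs/{jobIdentity}
print(submitted.job_id, submitted.state)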
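
list (GET /Jobs) forwards the standard OData options and returns a JobInformationBasicPaged iterator whose internal_paging callback follows the service's next links, so callers just iterate. A hedged example, reusing job_client from the first sketch; the filter expression and the printed attribute names come from the JobInformationBasic model as I understand it, not from this hunk.

# Fetch up to 10 succeeded jobs, newest first; paging is handled transparently.
for job in job_client.job.list(
        account,
        filter="result eq 'Succeeded'",   # illustrative $filter expression
        top=10,                           # $top (validated with minimum=1 above)
        orderby='submitTime desc'):       # illustrative $orderby clause
    print(job.job_id, job.name, job.state)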
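
PipelineOperations and RecurrenceOperations follow the same pattern; their list and get calls accept an optional start_date_time/end_date_time window that the docstrings cap at 30 days, serialized as ISO-8601 by the generated code. Another hedged sketch; the printed attribute names are assumptions from the corresponding models, which are not in this hunk.

import datetime

# Aggregate the last 7 days, comfortably inside the documented 30-day window.
window_end = datetime.datetime.utcnow()
window_start = window_end - datetime.timedelta(days=7)

for pipeline in job_client.pipeline.list(account, start_date_time=window_start, end_date_time=window_end):
    print(pipeline.pipeline_id, pipeline.pipeline_name)

for recurrence in job_client.recurrence.list(account, start_date_time=window_start, end_date_time=window_end):
    print(recurrence.recurrence_id, recurrence.recurrence_name)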
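
Finally, since azure-mgmt-datalake-analytics==0.2.1 is dropped from the three platform requirements files and from setup.py, the vendored copy is now the only thing satisfying these imports. A quick smoke test of the new layout; the paths and pinned versions are taken directly from the files added in this diff.

# The vendored package should import cleanly and report the versions pinned above.
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.version import VERSION
from azure.cli.command_modules.dla.vendored_sdks.azure_mgmt_datalake_analytics.job.version import (
    VERSION as JOB_VERSION,
)

print(VERSION)       # expected "0.2.1" per version.py
print(JOB_VERSION)   # expected "0.1.6" per job/version.py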