diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in b/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in index a3cb07df8765..3a9b6517412b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/MANIFEST.in @@ -1,3 +1,4 @@ +include _meta.json recursive-include tests *.py *.yaml include *.md include azure/__init__.py diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json new file mode 100644 index 000000000000..208a30c170e9 --- /dev/null +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json @@ -0,0 +1,11 @@ +{ + "autorest": "3.4.5", + "use": [ + "@autorest/python@5.8.4", + "@autorest/modelerfour@4.19.2" + ], + "commit": "d48576a8e4a5c58e27aa2042f83898a30d94b8d3", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/streamanalytics/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.4 --use=@autorest/modelerfour@4.19.2 --version=3.4.5", + "readme": "specification/streamanalytics/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py index 1034fe20616d..725ad429320f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py @@ -48,6 +48,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id + self.api_version = "2020-03-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-streamanalytics/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json index 5ff7c85f8f06..63d4a0f8ac17 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_metadata.json @@ -1,6 +1,6 @@ { - "chosen_version": "", - "total_api_version_list": ["2017-04-01-preview", "2020-03-01-preview"], + "chosen_version": "2020-03-01", + "total_api_version_list": ["2020-03-01"], "client": { "name": "StreamAnalyticsManagementClient", "filename": "_stream_analytics_management_client", @@ -8,32 +8,35 @@ "base_url": "\u0027https://management.azure.com\u0027", "custom_base_url": null, "azure_arm": true, - "has_lro_operations": true + "has_lro_operations": true, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StreamAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": 
{\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StreamAnalyticsManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}" }, "global_parameters": { - "sync_method": { + "sync": { "credential": { - "method_signature": "credential, # type: \"TokenCredential\"", + "signature": "credential, # type: \"TokenCredential\"", "description": "Credential needed for the client to connect to Azure.", "docstring_type": "~azure.core.credentials.TokenCredential", "required": true }, "subscription_id": { - "method_signature": "subscription_id, # type: str", + "signature": "subscription_id, # type: str", "description": "The ID of the target subscription.", "docstring_type": "str", "required": true } }, - "async_method": { + "async": { "credential": { - "method_signature": "credential, # type: \"AsyncTokenCredential\"", + "signature": "credential: \"AsyncTokenCredential\",", "description": "Credential needed for the client to connect to Azure.", "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", "required": true }, "subscription_id": { - "method_signature": "subscription_id, # type: str", + "signature": "subscription_id: str,", "description": "The ID of the target subscription.", "docstring_type": "str", "required": true @@ -41,28 +44,68 @@ }, "constant": { }, - "call": "credential, subscription_id" + "call": "credential, subscription_id", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=None, # type: Optional[str]", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: Optional[str] = None,", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } }, "config": { "credential": true, "credential_scopes": ["https://management.azure.com/.default"], "credential_default_policy_type": "BearerTokenCredentialPolicy", "credential_default_policy_type_has_async_version": true, - "credential_key_header_name": null + "credential_key_header_name": null, + "sync_imports": "{\"regular\": 
{\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" }, "operation_groups": { - "functions": "FunctionsOperations", + "operations": "Operations", + "streaming_jobs": "StreamingJobsOperations", "inputs": "InputsOperations", "outputs": "OutputsOperations", - "streaming_jobs": "StreamingJobsOperations", - "subscriptions": "SubscriptionsOperations", "transformations": "TransformationsOperations", - "operations": "Operations", + "functions": "FunctionsOperations", + "subscriptions": "SubscriptionsOperations", "clusters": "ClustersOperations", "private_endpoints": "PrivateEndpointsOperations" - }, - "operation_mixins": { - }, - "sync_imports": "None", - "async_imports": "None" + } } \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py index 7322c3fb1613..0d179377d25f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py @@ -16,15 +16,16 @@ from typing import Any, Optional from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse from ._configuration import StreamAnalyticsManagementClientConfiguration -from .operations import FunctionsOperations +from .operations import Operations +from .operations import StreamingJobsOperations from .operations import InputsOperations from .operations import OutputsOperations -from .operations import StreamingJobsOperations -from .operations import SubscriptionsOperations from .operations import TransformationsOperations -from .operations import Operations +from .operations import FunctionsOperations +from .operations import SubscriptionsOperations from .operations import ClustersOperations from .operations import PrivateEndpointsOperations from . import models @@ -33,20 +34,20 @@ class StreamAnalyticsManagementClient(object): """Stream Analytics Client. 
- :ivar functions: FunctionsOperations operations - :vartype functions: stream_analytics_management_client.operations.FunctionsOperations + :ivar operations: Operations operations + :vartype operations: stream_analytics_management_client.operations.Operations + :ivar streaming_jobs: StreamingJobsOperations operations + :vartype streaming_jobs: stream_analytics_management_client.operations.StreamingJobsOperations :ivar inputs: InputsOperations operations :vartype inputs: stream_analytics_management_client.operations.InputsOperations :ivar outputs: OutputsOperations operations :vartype outputs: stream_analytics_management_client.operations.OutputsOperations - :ivar streaming_jobs: StreamingJobsOperations operations - :vartype streaming_jobs: stream_analytics_management_client.operations.StreamingJobsOperations - :ivar subscriptions: SubscriptionsOperations operations - :vartype subscriptions: stream_analytics_management_client.operations.SubscriptionsOperations :ivar transformations: TransformationsOperations operations :vartype transformations: stream_analytics_management_client.operations.TransformationsOperations - :ivar operations: Operations operations - :vartype operations: stream_analytics_management_client.operations.Operations + :ivar functions: FunctionsOperations operations + :vartype functions: stream_analytics_management_client.operations.FunctionsOperations + :ivar subscriptions: SubscriptionsOperations operations + :vartype subscriptions: stream_analytics_management_client.operations.SubscriptionsOperations :ivar clusters: ClustersOperations operations :vartype clusters: stream_analytics_management_client.operations.ClustersOperations :ivar private_endpoints: PrivateEndpointsOperations operations @@ -77,25 +78,43 @@ def __init__( self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) - self.functions = FunctionsOperations( + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.streaming_jobs = StreamingJobsOperations( self._client, self._config, self._serialize, self._deserialize) self.inputs = InputsOperations( self._client, self._config, self._serialize, self._deserialize) self.outputs = OutputsOperations( self._client, self._config, self._serialize, self._deserialize) - self.streaming_jobs = StreamingJobsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.subscriptions = SubscriptionsOperations( - self._client, self._config, self._serialize, self._deserialize) self.transformations = TransformationsOperations( self._client, self._config, self._serialize, self._deserialize) - self.operations = Operations( + self.functions = FunctionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations( self._client, self._config, self._serialize, self._deserialize) self.clusters = ClustersOperations( self._client, self._config, self._serialize, self._deserialize) self.private_endpoints = PrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize) + def _send_request(self, http_request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. 
+ :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + def close(self): # type: () -> None self._client.close() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py index 3b909b5c8886..e5754a47ce68 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0rc1" +VERSION = "1.0.0b1" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py index 444e27397d52..e035713008c2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py @@ -45,6 +45,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id + self.api_version = "2020-03-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-streamanalytics/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py index 99472b9429a0..c82bb0cffa4d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py @@ -8,6 +8,7 @@ from typing import Any, Optional, TYPE_CHECKING +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer @@ -16,13 +17,13 @@ from azure.core.credentials_async import AsyncTokenCredential from ._configuration import StreamAnalyticsManagementClientConfiguration -from .operations import FunctionsOperations +from .operations import Operations +from .operations import StreamingJobsOperations from .operations import InputsOperations from .operations import OutputsOperations -from .operations import StreamingJobsOperations -from .operations import SubscriptionsOperations from .operations import TransformationsOperations -from .operations import Operations +from .operations import FunctionsOperations +from .operations import SubscriptionsOperations from .operations import ClustersOperations from .operations import PrivateEndpointsOperations from .. 
import models @@ -31,20 +32,20 @@ class StreamAnalyticsManagementClient(object): """Stream Analytics Client. - :ivar functions: FunctionsOperations operations - :vartype functions: stream_analytics_management_client.aio.operations.FunctionsOperations + :ivar operations: Operations operations + :vartype operations: stream_analytics_management_client.aio.operations.Operations + :ivar streaming_jobs: StreamingJobsOperations operations + :vartype streaming_jobs: stream_analytics_management_client.aio.operations.StreamingJobsOperations :ivar inputs: InputsOperations operations :vartype inputs: stream_analytics_management_client.aio.operations.InputsOperations :ivar outputs: OutputsOperations operations :vartype outputs: stream_analytics_management_client.aio.operations.OutputsOperations - :ivar streaming_jobs: StreamingJobsOperations operations - :vartype streaming_jobs: stream_analytics_management_client.aio.operations.StreamingJobsOperations - :ivar subscriptions: SubscriptionsOperations operations - :vartype subscriptions: stream_analytics_management_client.aio.operations.SubscriptionsOperations :ivar transformations: TransformationsOperations operations :vartype transformations: stream_analytics_management_client.aio.operations.TransformationsOperations - :ivar operations: Operations operations - :vartype operations: stream_analytics_management_client.aio.operations.Operations + :ivar functions: FunctionsOperations operations + :vartype functions: stream_analytics_management_client.aio.operations.FunctionsOperations + :ivar subscriptions: SubscriptionsOperations operations + :vartype subscriptions: stream_analytics_management_client.aio.operations.SubscriptionsOperations :ivar clusters: ClustersOperations operations :vartype clusters: stream_analytics_management_client.aio.operations.ClustersOperations :ivar private_endpoints: PrivateEndpointsOperations operations @@ -74,25 +75,42 @@ def __init__( self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) - self.functions = FunctionsOperations( + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.streaming_jobs = StreamingJobsOperations( self._client, self._config, self._serialize, self._deserialize) self.inputs = InputsOperations( self._client, self._config, self._serialize, self._deserialize) self.outputs = OutputsOperations( self._client, self._config, self._serialize, self._deserialize) - self.streaming_jobs = StreamingJobsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.subscriptions = SubscriptionsOperations( - self._client, self._config, self._serialize, self._deserialize) self.transformations = TransformationsOperations( self._client, self._config, self._serialize, self._deserialize) - self.operations = Operations( + self.functions = FunctionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations( self._client, self._config, self._serialize, self._deserialize) self.clusters = ClustersOperations( self._client, self._config, self._serialize, self._deserialize) self.private_endpoints = PrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize) + async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. 
+ :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + async def close(self) -> None: await self._client.close() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py index a247559efb05..72cfdc41ec92 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py @@ -6,24 +6,24 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._functions_operations import FunctionsOperations +from ._operations import Operations +from ._streaming_jobs_operations import StreamingJobsOperations from ._inputs_operations import InputsOperations from ._outputs_operations import OutputsOperations -from ._streaming_jobs_operations import StreamingJobsOperations -from ._subscriptions_operations import SubscriptionsOperations from ._transformations_operations import TransformationsOperations -from ._operations import Operations +from ._functions_operations import FunctionsOperations +from ._subscriptions_operations import SubscriptionsOperations from ._clusters_operations import ClustersOperations from ._private_endpoints_operations import PrivateEndpointsOperations __all__ = [ - 'FunctionsOperations', + 'Operations', + 'StreamingJobsOperations', 'InputsOperations', 'OutputsOperations', - 'StreamingJobsOperations', - 'SubscriptionsOperations', 'TransformationsOperations', - 'Operations', + 'FunctionsOperations', + 'SubscriptionsOperations', 'ClustersOperations', 'PrivateEndpointsOperations', ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py index 29088ce6710f..a414a69f8efb 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class ClustersOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -47,17 +47,17 @@ async def _create_or_update_initial( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Cluster": - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + **kwargs: Any + ) -> "_models.Cluster": + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -92,7 +92,7 @@ async def _create_or_update_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -111,11 +111,11 @@ async def begin_create_or_update( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> AsyncLROPoller["models.Cluster"]: + **kwargs: Any + ) -> AsyncLROPoller["_models.Cluster"]: """Creates a Stream Analytics Cluster or replaces an already existing cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -134,8 +134,8 @@ async def begin_create_or_update( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) @@ -143,7 +143,7 @@ async def begin_create_or_update( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -170,7 +170,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -188,16 +194,16 @@ async def _update_initial( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, - **kwargs - ) -> Optional["models.Cluster"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + **kwargs: Any + ) -> Optional["_models.Cluster"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -230,7 +236,7 @@ async def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -247,10 +253,10 @@ async def begin_update( self, resource_group_name: str, cluster_name: str, - cluster: "models.Cluster", + cluster: "_models.Cluster", if_match: Optional[str] = None, - **kwargs - ) -> AsyncLROPoller["models.Cluster"]: + **kwargs: Any + ) -> AsyncLROPoller["_models.Cluster"]: """Updates an existing cluster. This can be used to partially update (ie. update one or two properties) a cluster without affecting the rest of the cluster definition. @@ -267,8 +273,8 @@ async def begin_update( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) @@ -276,7 +282,7 @@ async def begin_update( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -302,7 +308,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -320,8 +332,8 @@ async def get( self, resource_group_name: str, cluster_name: str, - **kwargs - ) -> "models.Cluster": + **kwargs: Any + ) -> "_models.Cluster": """Gets information about the specified cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. 
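
The reworded `polling` docstrings in the hunks above spell out the contract: `True` (the default) uses `AsyncARMPolling`, `False` disables polling, and a pre-built polling object substitutes a custom strategy; note that the regenerated code now also threads `path_format_arguments` into the default poller. A minimal usage sketch, assuming an already-constructed async client and illustrative resource names:

    from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling

    async def update_cluster(client, cluster_patch):
        # A five-second polling interval instead of the default; polling=False
        # would return a poller whose result() resolves from the initial response.
        poller = await client.clusters.begin_update(
            "my-rg", "my-cluster", cluster_patch,
            polling=AsyncARMPolling(5),
        )
        return await poller.result()
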
@@ -333,12 +345,12 @@ async def get( :rtype: ~stream_analytics_management_client.models.Cluster :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -364,7 +376,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Cluster', pipeline_response) @@ -379,14 +391,14 @@ async def _delete_initial( self, resource_group_name: str, cluster_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -412,7 +424,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -424,7 +436,7 @@ async def begin_delete( self, resource_group_name: str, cluster_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes the specified cluster. @@ -434,8 +446,8 @@ async def begin_delete( :type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -464,7 +476,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -480,8 +498,8 @@ def get_long_running_output(pipeline_response): def list_by_subscription( self, - **kwargs - ) -> AsyncIterable["models.ClusterListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.ClusterListResult"]: """Lists all of the clusters in the given subscription. :keyword callable cls: A custom type or function that will be passed the direct response @@ -489,12 +507,12 @@ def list_by_subscription( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -534,7 +552,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -548,8 +566,8 @@ async def get_next(next_link=None): def list_by_resource_group( self, resource_group_name: str, - **kwargs - ) -> AsyncIterable["models.ClusterListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.ClusterListResult"]: """Lists all of the clusters in the given resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. 
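
`list_by_subscription` and `list_by_resource_group` above return an `AsyncItemPaged` without being awaited; iterating it deserializes `ClusterListResult` pages and follows `next_link` transparently. A sketch with an illustrative resource group name:

    async def print_clusters(client):
        # The pager issues follow-up page requests lazily during iteration.
        async for cluster in client.clusters.list_by_resource_group("my-rg"):
            print(cluster.name)
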
@@ -559,12 +577,12 @@ def list_by_resource_group( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -605,7 +623,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -620,8 +638,8 @@ def list_streaming_jobs( self, resource_group_name: str, cluster_name: str, - **kwargs - ) -> AsyncIterable["models.ClusterJobListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.ClusterJobListResult"]: """Lists all of the streaming jobs in the given cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -633,12 +651,12 @@ def list_streaming_jobs( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -680,7 +698,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py index 1d04fcab6693..f2606c033b73 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class FunctionsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -48,11 +48,11 @@ async def create_or_replace( resource_group_name: str, job_name: str, function_name: str, - function: "models.Function", + function: "_models.Function", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Function": + **kwargs: Any + ) -> "_models.Function": """Creates a function or replaces an already existing function under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -76,12 +76,12 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -117,7 +117,8 @@ async def create_or_replace( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -139,10 +140,10 @@ async def update( resource_group_name: str, job_name: str, function_name: str, - function: "models.Function", + function: "_models.Function", if_match: Optional[str] = None, - **kwargs - ) -> "models.Function": + **kwargs: Any + ) -> "_models.Function": """Updates an existing function under an existing streaming job. This can be used to partially update (ie. update one or two properties) a function without affecting the rest the job or function definition. @@ -167,12 +168,12 @@ async def update( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -206,7 +207,8 @@ async def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -223,7 +225,7 @@ async def delete( resource_group_name: str, job_name: str, function_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a function from the streaming job. 
@@ -243,7 +245,8 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -261,6 +264,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -268,7 +272,8 @@ async def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -280,8 +285,8 @@ async def get( resource_group_name: str, job_name: str, function_name: str, - **kwargs - ) -> "models.Function": + **kwargs: Any + ) -> "_models.Function": """Gets details about the specified function. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -295,12 +300,12 @@ async def get( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -327,7 +332,8 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -344,8 +350,8 @@ def list_by_streaming_job( resource_group_name: str, job_name: str, select: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.FunctionListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.FunctionListResult"]: """Lists all of the functions under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
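
Every error path above now goes through `failsafe_deserialize`, so an error body that cannot be parsed into the `Error` model yields `model=None` on the raised exception instead of a secondary deserialization failure. Callers can inspect the typed error like this (illustrative names, async client assumed):

    from azure.core.exceptions import HttpResponseError

    async def get_function(client):
        try:
            return await client.functions.get("my-rg", "my-job", "my-fn")
        except HttpResponseError as exc:
            # exc.model is the deserialized Error, or None if the body was
            # not parseable; exc.response keeps the raw response either way.
            print(exc.status_code, exc.model)
            raise
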
@@ -361,12 +367,12 @@ def list_by_streaming_job( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.FunctionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FunctionListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.FunctionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -410,8 +416,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -425,15 +432,15 @@ async def _test_initial( resource_group_name: str, job_name: str, function_name: str, - function: Optional["models.Function"] = None, - **kwargs - ) -> Optional["models.ResourceTestStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + function: Optional["_models.Function"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -468,7 +475,8 @@ async def _test_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -485,9 +493,9 @@ async def begin_test( resource_group_name: str, job_name: str, function_name: str, - function: Optional["models.Function"] = None, - **kwargs - ) -> AsyncLROPoller["models.ResourceTestStatus"]: + function: Optional["_models.Function"] = None, + **kwargs: Any + ) -> AsyncLROPoller["_models.ResourceTestStatus"]: """Tests if the information provided for a function is valid. This can range from testing the connection to the underlying web service behind the function or making sure the function code provided is syntactically correct. @@ -506,8 +514,8 @@ async def begin_test( :type function: ~stream_analytics_management_client.models.Function :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. 
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -515,7 +523,7 @@ async def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -541,7 +549,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -560,9 +575,9 @@ async def retrieve_default_definition( resource_group_name: str, job_name: str, function_name: str, - function_retrieve_default_definition_parameters: Optional["models.FunctionRetrieveDefaultDefinitionParameters"] = None, - **kwargs - ) -> "models.Function": + function_retrieve_default_definition_parameters: Optional["_models.FunctionRetrieveDefaultDefinitionParameters"] = None, + **kwargs: Any + ) -> "_models.Function": """Retrieves the default definition of a function based on the parameters specified. :param resource_group_name: The name of the resource group. The name is case insensitive. 
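
`begin_test` above is a long-running validation whose poller resolves to a `ResourceTestStatus` rather than the function itself; the regenerated code also hands the poller `path_format_arguments` so continuation URLs are formatted correctly. A usage sketch (names illustrative):

    async def test_function(client):
        poller = await client.functions.begin_test("my-rg", "my-job", "my-fn")
        status = await poller.result()
        # status.status reports the outcome; status.error carries details
        # when the test fails.
        return status
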
@@ -579,12 +594,12 @@ async def retrieve_default_definition( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -619,7 +634,8 @@ async def retrieve_default_definition( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Function', pipeline_response) @@ -627,4 +643,4 @@ async def retrieve_default_definition( return cls(pipeline_response, deserialized, {}) return deserialized - retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/RetrieveDefaultDefinition'} # type: ignore + retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py index f7451e9e8aab..8363c2c63a54 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class InputsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -48,11 +48,11 @@ async def create_or_replace( resource_group_name: str, job_name: str, input_name: str, - input: "models.Input", + input: "_models.Input", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Input": + **kwargs: Any + ) -> "_models.Input": """Creates an input or replaces an already existing input under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
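
The metadata edit above corrects the route casing to `.../retrieveDefaultDefinition` for the 2020-03-01 API; the Python surface is unchanged. A sketch of the call (illustrative names):

    async def default_definition(client):
        # The parameters object is optional in the signature; whether the
        # service requires it depends on the function's binding type.
        return await client.functions.retrieve_default_definition(
            "my-rg", "my-job", "my-fn",
        )
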
@@ -75,12 +75,12 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -116,7 +116,8 @@ async def create_or_replace( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -138,10 +139,10 @@ async def update( resource_group_name: str, job_name: str, input_name: str, - input: "models.Input", + input: "_models.Input", if_match: Optional[str] = None, - **kwargs - ) -> "models.Input": + **kwargs: Any + ) -> "_models.Input": """Updates an existing input under an existing streaming job. This can be used to partially update (ie. update one or two properties) an input without affecting the rest the job or input definition. @@ -165,12 +166,12 @@ async def update( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -204,7 +205,8 @@ async def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -221,7 +223,7 @@ async def delete( resource_group_name: str, job_name: str, input_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes an input from the streaming job. 
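
`create_or_replace` and `update` above deserialize the response's `ETag` header, which is discarded unless a `cls` hook is supplied; the generated convention is `cls(pipeline_response, deserialized, response_headers)`. A sketch of capturing the ETag for a later `if_match` (the callback name is hypothetical):

    def with_etag(pipeline_response, deserialized, response_headers):
        return deserialized, response_headers.get("ETag")

    async def update_input(client, input_patch):
        updated, etag = await client.inputs.update(
            "my-rg", "my-job", "my-input", input_patch, cls=with_etag,
        )
        return updated, etag
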
@@ -241,7 +243,8 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -259,6 +262,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -266,7 +270,8 @@ async def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -278,8 +283,8 @@ async def get( resource_group_name: str, job_name: str, input_name: str, - **kwargs - ) -> "models.Input": + **kwargs: Any + ) -> "_models.Input": """Gets details about the specified input. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -293,12 +298,12 @@ async def get( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -325,7 +330,8 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -342,8 +348,8 @@ def list_by_streaming_job( resource_group_name: str, job_name: str, select: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.InputListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.InputListResult"]: """Lists all of the inputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -359,12 +365,12 @@ def list_by_streaming_job( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.InputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.InputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.InputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -408,8 +414,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -423,15 +430,15 @@ async def _test_initial( resource_group_name: str, job_name: str, input_name: str, - input: Optional["models.Input"] = None, - **kwargs - ) -> Optional["models.ResourceTestStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + input: Optional["_models.Input"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -466,7 +473,8 @@ async def _test_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -483,9 +491,9 @@ async def begin_test( resource_group_name: str, job_name: str, input_name: str, - input: Optional["models.Input"] = None, - **kwargs - ) -> AsyncLROPoller["models.ResourceTestStatus"]: + input: Optional["_models.Input"] = None, + **kwargs: Any + ) -> AsyncLROPoller["_models.ResourceTestStatus"]: """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -503,8 +511,8 @@ async def begin_test( :type input: ~stream_analytics_management_client.models.Input :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
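The reworded `polling` docstring above distinguishes three modes: the default `AsyncARMPolling`, `False` for no polling, and a caller-initialized polling object. A sketch, assuming `AsyncARMPolling`'s first positional argument is the polling interval (as the generated `AsyncARMPolling(lro_delay, ...)` calls in this diff suggest):

```python
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling

async def test_input_variants(client, rg: str, job: str, name: str) -> None:
    # Default: AsyncARMPolling drives the LRO to completion.
    poller = await client.inputs.begin_test(rg, job, name)
    print((await poller.result()).status)

    # polling=False performs no polling; result() reflects only the
    # initial 200/202 response.
    no_poll = await client.inputs.begin_test(rg, job, name, polling=False)

    # A pre-initialized polling object, here with a 5 second interval
    # instead of the service default.
    custom = await client.inputs.begin_test(rg, job, name, polling=AsyncARMPolling(timeout=5))
    await custom.result()
```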
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -512,7 +520,7 @@ async def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -538,7 +546,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py index 6e073694446f..ef1a9bdd9c6f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -33,7 +33,7 @@ class Operations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -43,8 +43,8 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs - ) -> AsyncIterable["models.OperationListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available Stream Analytics related operations. 
:keyword callable cls: A custom type or function that will be passed the direct response @@ -52,12 +52,12 @@ def list( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -93,8 +93,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py index a9d08028e8aa..800d1ff0e6b6 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class OutputsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -48,11 +48,11 @@ async def create_or_replace( resource_group_name: str, job_name: str, output_name: str, - output: "models.Output", + output: "_models.Output", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Output": + **kwargs: Any + ) -> "_models.Output": """Creates an output or replaces an already existing output under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
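The `Operations.list` hunk above returns an `AsyncItemPaged`, like every list method in this diff. A small sketch (an initialized client is assumed; `op.name` is taken from the `Operation` model):

```python
async def show_available_operations(client) -> None:
    # AsyncItemPaged: pages are fetched lazily as the loop advances.
    async for op in client.operations.list():
        print(op.name)
```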
@@ -76,12 +76,12 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -117,7 +117,8 @@ async def create_or_replace( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -139,10 +140,10 @@ async def update( resource_group_name: str, job_name: str, output_name: str, - output: "models.Output", + output: "_models.Output", if_match: Optional[str] = None, - **kwargs - ) -> "models.Output": + **kwargs: Any + ) -> "_models.Output": """Updates an existing output under an existing streaming job. This can be used to partially update (ie. update one or two properties) an output without affecting the rest the job or output definition. @@ -167,12 +168,12 @@ async def update( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -206,7 +207,8 @@ async def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -223,7 +225,7 @@ async def delete( resource_group_name: str, job_name: str, output_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes an output from the streaming job. 
@@ -243,7 +245,8 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -261,6 +264,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -268,7 +272,8 @@ async def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -280,8 +285,8 @@ async def get( resource_group_name: str, job_name: str, output_name: str, - **kwargs - ) -> "models.Output": + **kwargs: Any + ) -> "_models.Output": """Gets details about the specified output. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -295,12 +300,12 @@ async def get( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -327,7 +332,8 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -344,8 +350,8 @@ def list_by_streaming_job( resource_group_name: str, job_name: str, select: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.OutputListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.OutputListResult"]: """Lists all of the outputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -361,12 +367,12 @@ def list_by_streaming_job( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.OutputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OutputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OutputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -410,8 +416,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -425,15 +432,15 @@ async def _test_initial( resource_group_name: str, job_name: str, output_name: str, - output: Optional["models.Output"] = None, - **kwargs - ) -> Optional["models.ResourceTestStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + output: Optional["_models.Output"] = None, + **kwargs: Any + ) -> Optional["_models.ResourceTestStatus"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -468,7 +475,8 @@ async def _test_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -485,9 +493,9 @@ async def begin_test( resource_group_name: str, job_name: str, output_name: str, - output: Optional["models.Output"] = None, - **kwargs - ) -> AsyncLROPoller["models.ResourceTestStatus"]: + output: Optional["_models.Output"] = None, + **kwargs: Any + ) -> AsyncLROPoller["_models.ResourceTestStatus"]: """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -505,8 +513,8 @@ async def begin_test( :type output: ~stream_analytics_management_client.models.Output :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -514,7 +522,7 @@ async def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -540,7 +548,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py index ba2975090858..b291644c31c0 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class PrivateEndpointsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -48,11 +48,11 @@ async def create_or_update( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - private_endpoint: "models.PrivateEndpoint", + private_endpoint: "_models.PrivateEndpoint", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.PrivateEndpoint": + **kwargs: Any + ) -> "_models.PrivateEndpoint": """Creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. 
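These `begin_*` docstrings also advertise `continuation_token`, which pairs with `AsyncLROPoller.continuation_token()`; the `path_format_arguments` now handed to `AsyncARMPolling` appears to be what lets a rebuilt poller re-derive the operation URL. A sketch with assumed names:

```python
async def start_and_resume_test(client, rg: str, job: str, output_name: str) -> None:
    poller = await client.outputs.begin_test(rg, job, output_name)
    token = poller.continuation_token()  # opaque string, safe to persist

    # Later (even in a fresh process with a new client), rebuild the poller:
    resumed = await client.outputs.begin_test(
        rg, job, output_name, continuation_token=token
    )
    print((await resumed.result()).status)
```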
@@ -76,12 +76,12 @@ async def create_or_update( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -117,7 +117,7 @@ async def create_or_update( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -137,8 +137,8 @@ async def get( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - **kwargs - ) -> "models.PrivateEndpoint": + **kwargs: Any + ) -> "_models.PrivateEndpoint": """Gets information about the specified Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -152,12 +152,12 @@ async def get( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -184,7 +184,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpoint', pipeline_response) @@ -200,14 +200,14 @@ async def _delete_initial( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -234,7 +234,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -247,7 +247,7 @@ async def begin_delete( resource_group_name: str, cluster_name: str, private_endpoint_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Delete the specified private endpoint. 
@@ -259,8 +259,8 @@ async def begin_delete( :type private_endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -290,7 +290,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -308,8 +315,8 @@ def list_by_cluster( self, resource_group_name: str, cluster_name: str, - **kwargs - ) -> AsyncIterable["models.PrivateEndpointListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointListResult"]: """Lists the private endpoints in the cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. 
@@ -321,12 +328,12 @@ def list_by_cluster( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -368,7 +375,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py index d6cdd13560d3..d4121d7c0d60 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -35,7 +35,7 @@ class StreamingJobsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -47,17 +47,17 @@ async def _create_or_replace_initial( self, resource_group_name: str, job_name: str, - streaming_job: "models.StreamingJob", + streaming_job: "_models.StreamingJob", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.StreamingJob": - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + **kwargs: Any + ) -> "_models.StreamingJob": + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -92,7 +92,8 @@ async def _create_or_replace_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -113,11 +114,11 @@ async def begin_create_or_replace( self, resource_group_name: str, job_name: str, - streaming_job: "models.StreamingJob", + streaming_job: "_models.StreamingJob", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> AsyncLROPoller["models.StreamingJob"]: + **kwargs: Any + ) -> AsyncLROPoller["_models.StreamingJob"]: """Creates a streaming job or replaces an already existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -137,8 +138,8 @@ async def begin_create_or_replace( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either StreamingJob or the result of cls(response) @@ -146,7 +147,7 @@ async def begin_create_or_replace( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -176,7 +177,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, response_headers) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -194,10 +201,10 @@ async def update( self, resource_group_name: str, job_name: str, - streaming_job: "models.StreamingJob", + streaming_job: "_models.StreamingJob", if_match: Optional[str] = None, - **kwargs - ) -> "models.StreamingJob": + **kwargs: Any + ) -> "_models.StreamingJob": """Updates an existing streaming job. This can be used to partially update (ie. update one or two properties) a streaming job without affecting the rest the job definition. 
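For `begin_create_or_replace` above, a deliberately minimal sketch. The full shape of a production `StreamingJob` (inputs, transformation, outputs) is service-defined; the location and `Sku(name="Standard")` here are assumptions:

```python
from azure.mgmt.streamanalytics.models import Sku, StreamingJob

async def ensure_job(client, rg: str, job_name: str) -> None:
    # Minimal body; real jobs normally also carry inputs, outputs, and a
    # transformation, added via the sibling operation groups.
    job = StreamingJob(location="West US", sku=Sku(name="Standard"))
    poller = await client.streaming_jobs.begin_create_or_replace(rg, job_name, job)
    created = await poller.result()
    print(created.id, created.provisioning_state)
```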
@@ -219,12 +226,12 @@ async def update( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -257,7 +264,8 @@ async def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -273,14 +281,15 @@ async def _delete_initial( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore @@ -297,6 +306,7 @@ async def _delete_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -304,7 +314,8 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -315,7 +326,7 @@ async def begin_delete( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes a streaming job. @@ -325,8 +336,8 @@ async def begin_delete( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -355,7 +366,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -374,8 +391,8 @@ async def get( resource_group_name: str, job_name: str, expand: Optional[str] = None, - **kwargs - ) -> "models.StreamingJob": + **kwargs: Any + ) -> "_models.StreamingJob": """Gets details about the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -392,12 +409,12 @@ async def get( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -425,7 +442,8 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -441,8 +459,8 @@ def list_by_resource_group( self, resource_group_name: str, expand: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.StreamingJobListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.StreamingJobListResult"]: """Lists all of the streaming jobs in the specified resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. 
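`list_by_resource_group` (and `list` below) are consumed with `async for`. A sketch; the `expand` values follow the docstring above, and `job.job_state` assumes the flattened `StreamingJob` model:

```python
async def print_jobs(client, rg: str) -> None:
    # expand takes a comma-separated subset of 'inputs', 'transformation',
    # 'outputs', 'functions'.
    async for job in client.streaming_jobs.list_by_resource_group(rg, expand="inputs,outputs"):
        print(job.name, job.job_state)
```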
@@ -457,12 +475,12 @@ def list_by_resource_group( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -505,8 +523,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -518,8 +537,8 @@ async def get_next(next_link=None): def list( self, expand: Optional[str] = None, - **kwargs - ) -> AsyncIterable["models.StreamingJobListResult"]: + **kwargs: Any + ) -> AsyncIterable["_models.StreamingJobListResult"]: """Lists all of the streaming jobs in the given subscription. :param expand: The $expand OData query parameter. This is a comma-separated list of additional @@ -532,12 +551,12 @@ def list( :rtype: ~azure.core.async_paging.AsyncItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -579,8 +598,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -593,16 +613,17 @@ async def _start_initial( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional["models.StartStreamingJobParameters"] = None, - **kwargs + start_job_parameters: Optional["_models.StartStreamingJobParameters"] = None, + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -620,6 +641,7 @@ async def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = 
self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if start_job_parameters is not None: @@ -633,7 +655,8 @@ async def _start_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -644,8 +667,8 @@ async def begin_start( self, resource_group_name: str, job_name: str, - start_job_parameters: Optional["models.StartStreamingJobParameters"] = None, - **kwargs + start_job_parameters: Optional["_models.StartStreamingJobParameters"] = None, + **kwargs: Any ) -> AsyncLROPoller[None]: """Starts a streaming job. Once a job is started it will start processing input events and produce output. @@ -658,8 +681,8 @@ async def begin_start( :type start_job_parameters: ~stream_analytics_management_client.models.StartStreamingJobParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -689,7 +712,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -707,14 +736,15 @@ async def _stop_initial( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -731,6 +761,7 @@ async def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -738,7 +769,8 @@ async def _stop_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -749,7 +781,7 @@ async def begin_stop( self, resource_group_name: str, job_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Stops a running streaming job. This will cause a running streaming job to stop processing input events and producing output. @@ -760,8 +792,8 @@ async def begin_stop( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
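Taken together, `begin_start` and `begin_stop` give the job lifecycle. A sketch; `output_start_mode="JobStartTime"` is one documented start mode, and both pollers resolve to `None` on success:

```python
from azure.mgmt.streamanalytics.models import StartStreamingJobParameters

async def cycle_job(client, rg: str, job: str) -> None:
    start = StartStreamingJobParameters(output_start_mode="JobStartTime")
    poller = await client.streaming_jobs.begin_start(rg, job, start_job_parameters=start)
    await poller.result()  # returns None; raises HttpResponseError on failure

    stop = await client.streaming_jobs.begin_stop(rg, job)
    await stop.result()
```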
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -790,7 +822,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -803,3 +841,125 @@ def get_long_running_output(pipeline_response): else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore + + async def _scale_initial( + self, + resource_group_name: str, + job_name: str, + scale_job_parameters: Optional["_models.ScaleStreamingJobParameters"] = None, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._scale_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if scale_job_parameters is not None: + body_content = self._serialize.body(scale_job_parameters, 'ScaleStreamingJobParameters') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _scale_initial.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale'} # type: ignore + + async def begin_scale( + self, + resource_group_name: str, + job_name: str, + scale_job_parameters: Optional["_models.ScaleStreamingJobParameters"] = None, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Scales a streaming job when the job is running. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param scale_job_parameters: Parameters applicable to a scale streaming job operation. + :type scale_job_parameters: ~stream_analytics_management_client.models.ScaleStreamingJobParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._scale_initial( + resource_group_name=resource_group_name, + job_name=job_name, + scale_job_parameters=scale_job_parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_scale.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py index 7a94e1139245..f3b815aae24c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py @@ -13,7 +13,7 @@ from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -32,7 +32,7 @@ class SubscriptionsOperations: :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -43,8 +43,8 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def list_quotas( self, location: str, - **kwargs - ) -> "models.SubscriptionQuotasListResult": + **kwargs: Any + ) -> "_models.SubscriptionQuotasListResult": """Retrieves the subscription's current quota information in a particular region. :param location: The region in which to retrieve the subscription's quota information. You can @@ -56,12 +56,12 @@ async def list_quotas( :rtype: ~stream_analytics_management_client.models.SubscriptionQuotasListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SubscriptionQuotasListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SubscriptionQuotasListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -86,7 +86,8 @@ async def list_quotas( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('SubscriptionQuotasListResult', pipeline_response) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py index 066f372f8baa..14b932577de4 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py @@ -13,7 +13,7 @@ from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models +from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -32,7 +32,7 @@ class TransformationsOperations: :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -45,11 +45,11 @@ async def create_or_replace( resource_group_name: str, job_name: str, transformation_name: str, - transformation: "models.Transformation", + transformation: "_models.Transformation", if_match: Optional[str] = None, if_none_match: Optional[str] = None, - **kwargs - ) -> "models.Transformation": + **kwargs: Any + ) -> "_models.Transformation": """Creates a transformation or replaces an already existing transformation under an existing streaming job. @@ -75,12 +75,12 @@ async def create_or_replace( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -116,7 +116,8 @@ async def create_or_replace( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -138,10 +139,10 @@ async def update( resource_group_name: str, job_name: str, transformation_name: str, - transformation: "models.Transformation", + transformation: "_models.Transformation", if_match: Optional[str] = None, - **kwargs - ) -> "models.Transformation": + **kwargs: Any + ) -> "_models.Transformation": """Updates an existing transformation under an existing streaming job. This can be used to partially update (ie. update one or two properties) a transformation without affecting the rest the job or transformation definition. 
@@ -167,12 +168,12 @@ async def update( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -206,7 +207,8 @@ async def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -223,8 +225,8 @@ async def get( resource_group_name: str, job_name: str, transformation_name: str, - **kwargs - ) -> "models.Transformation": + **kwargs: Any + ) -> "_models.Transformation": """Gets details about the specified transformation. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -238,12 +240,12 @@ async def get( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -270,7 +272,8 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py index bdaa063b8a5d..ba029b3502f0 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py @@ -7,26 +7,18 @@ # -------------------------------------------------------------------------- try: - from ._models_py3 import AggregateFunctionProperties from ._models_py3 import AvroSerialization from ._models_py3 import AzureDataLakeStoreOutputDataSource from ._models_py3 import AzureDataLakeStoreOutputDataSourceProperties - from ._models_py3 import AzureFunctionOutputDataSource - from ._models_py3 import AzureMachineLearningServiceFunctionBinding - from ._models_py3 import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters - from ._models_py3 import 
AzureMachineLearningServiceInputColumn - from ._models_py3 import AzureMachineLearningServiceInputs - from ._models_py3 import AzureMachineLearningServiceOutputColumn - from ._models_py3 import AzureMachineLearningStudioFunctionBinding - from ._models_py3 import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters - from ._models_py3 import AzureMachineLearningStudioInputColumn - from ._models_py3 import AzureMachineLearningStudioInputs - from ._models_py3 import AzureMachineLearningStudioOutputColumn + from ._models_py3 import AzureMachineLearningWebServiceFunctionBinding + from ._models_py3 import AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters + from ._models_py3 import AzureMachineLearningWebServiceInputColumn + from ._models_py3 import AzureMachineLearningWebServiceInputs + from ._models_py3 import AzureMachineLearningWebServiceOutputColumn from ._models_py3 import AzureSqlDatabaseDataSourceProperties from ._models_py3 import AzureSqlDatabaseOutputDataSource from ._models_py3 import AzureSqlDatabaseOutputDataSourceProperties from ._models_py3 import AzureSqlReferenceInputDataSource - from ._models_py3 import AzureSqlReferenceInputDataSourceProperties from ._models_py3 import AzureSynapseDataSourceProperties from ._models_py3 import AzureSynapseOutputDataSource from ._models_py3 import AzureSynapseOutputDataSourceProperties @@ -38,8 +30,6 @@ from ._models_py3 import BlobReferenceInputDataSourceProperties from ._models_py3 import BlobStreamInputDataSource from ._models_py3 import BlobStreamInputDataSourceProperties - from ._models_py3 import CSharpFunctionBinding - from ._models_py3 import CSharpFunctionRetrieveDefaultDefinitionParameters from ._models_py3 import Cluster from ._models_py3 import ClusterInfo from ._models_py3 import ClusterJob @@ -49,13 +39,12 @@ from ._models_py3 import ClusterSku from ._models_py3 import Compression from ._models_py3 import CsvSerialization - from ._models_py3 import CustomClrSerialization from ._models_py3 import DiagnosticCondition from ._models_py3 import Diagnostics from ._models_py3 import DocumentDbOutputDataSource from ._models_py3 import Error - from ._models_py3 import ErrorAutoGenerated from ._models_py3 import ErrorDetails + from ._models_py3 import ErrorError from ._models_py3 import ErrorResponse from ._models_py3 import EventHubDataSourceProperties from ._models_py3 import EventHubOutputDataSource @@ -64,7 +53,6 @@ from ._models_py3 import EventHubStreamInputDataSourceProperties from ._models_py3 import EventHubV2OutputDataSource from ._models_py3 import EventHubV2StreamInputDataSource - from ._models_py3 import External from ._models_py3 import Function from ._models_py3 import FunctionBinding from ._models_py3 import FunctionInput @@ -102,45 +90,38 @@ from ._models_py3 import Resource from ._models_py3 import ResourceTestStatus from ._models_py3 import ScalarFunctionProperties + from ._models_py3 import ScaleStreamingJobParameters from ._models_py3 import Serialization from ._models_py3 import ServiceBusDataSourceProperties from ._models_py3 import ServiceBusQueueOutputDataSource from ._models_py3 import ServiceBusQueueOutputDataSourceProperties from ._models_py3 import ServiceBusTopicOutputDataSource from ._models_py3 import ServiceBusTopicOutputDataSourceProperties + from ._models_py3 import Sku from ._models_py3 import StartStreamingJobParameters from ._models_py3 import StorageAccount from ._models_py3 import StreamInputDataSource from ._models_py3 import StreamInputProperties from 
._models_py3 import StreamingJob from ._models_py3 import StreamingJobListResult - from ._models_py3 import StreamingJobSku from ._models_py3 import SubResource from ._models_py3 import SubscriptionQuota from ._models_py3 import SubscriptionQuotasListResult from ._models_py3 import TrackedResource from ._models_py3 import Transformation except (SyntaxError, ImportError): - from ._models import AggregateFunctionProperties # type: ignore from ._models import AvroSerialization # type: ignore from ._models import AzureDataLakeStoreOutputDataSource # type: ignore from ._models import AzureDataLakeStoreOutputDataSourceProperties # type: ignore - from ._models import AzureFunctionOutputDataSource # type: ignore - from ._models import AzureMachineLearningServiceFunctionBinding # type: ignore - from ._models import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters # type: ignore - from ._models import AzureMachineLearningServiceInputColumn # type: ignore - from ._models import AzureMachineLearningServiceInputs # type: ignore - from ._models import AzureMachineLearningServiceOutputColumn # type: ignore - from ._models import AzureMachineLearningStudioFunctionBinding # type: ignore - from ._models import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters # type: ignore - from ._models import AzureMachineLearningStudioInputColumn # type: ignore - from ._models import AzureMachineLearningStudioInputs # type: ignore - from ._models import AzureMachineLearningStudioOutputColumn # type: ignore + from ._models import AzureMachineLearningWebServiceFunctionBinding # type: ignore + from ._models import AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters # type: ignore + from ._models import AzureMachineLearningWebServiceInputColumn # type: ignore + from ._models import AzureMachineLearningWebServiceInputs # type: ignore + from ._models import AzureMachineLearningWebServiceOutputColumn # type: ignore from ._models import AzureSqlDatabaseDataSourceProperties # type: ignore from ._models import AzureSqlDatabaseOutputDataSource # type: ignore from ._models import AzureSqlDatabaseOutputDataSourceProperties # type: ignore from ._models import AzureSqlReferenceInputDataSource # type: ignore - from ._models import AzureSqlReferenceInputDataSourceProperties # type: ignore from ._models import AzureSynapseDataSourceProperties # type: ignore from ._models import AzureSynapseOutputDataSource # type: ignore from ._models import AzureSynapseOutputDataSourceProperties # type: ignore @@ -152,8 +133,6 @@ from ._models import BlobReferenceInputDataSourceProperties # type: ignore from ._models import BlobStreamInputDataSource # type: ignore from ._models import BlobStreamInputDataSourceProperties # type: ignore - from ._models import CSharpFunctionBinding # type: ignore - from ._models import CSharpFunctionRetrieveDefaultDefinitionParameters # type: ignore from ._models import Cluster # type: ignore from ._models import ClusterInfo # type: ignore from ._models import ClusterJob # type: ignore @@ -163,13 +142,12 @@ from ._models import ClusterSku # type: ignore from ._models import Compression # type: ignore from ._models import CsvSerialization # type: ignore - from ._models import CustomClrSerialization # type: ignore from ._models import DiagnosticCondition # type: ignore from ._models import Diagnostics # type: ignore from ._models import DocumentDbOutputDataSource # type: ignore from ._models import Error # type: ignore - from ._models import ErrorAutoGenerated # 
type: ignore from ._models import ErrorDetails # type: ignore + from ._models import ErrorError # type: ignore from ._models import ErrorResponse # type: ignore from ._models import EventHubDataSourceProperties # type: ignore from ._models import EventHubOutputDataSource # type: ignore @@ -178,7 +156,6 @@ from ._models import EventHubStreamInputDataSourceProperties # type: ignore from ._models import EventHubV2OutputDataSource # type: ignore from ._models import EventHubV2StreamInputDataSource # type: ignore - from ._models import External # type: ignore from ._models import Function # type: ignore from ._models import FunctionBinding # type: ignore from ._models import FunctionInput # type: ignore @@ -216,19 +193,20 @@ from ._models import Resource # type: ignore from ._models import ResourceTestStatus # type: ignore from ._models import ScalarFunctionProperties # type: ignore + from ._models import ScaleStreamingJobParameters # type: ignore from ._models import Serialization # type: ignore from ._models import ServiceBusDataSourceProperties # type: ignore from ._models import ServiceBusQueueOutputDataSource # type: ignore from ._models import ServiceBusQueueOutputDataSourceProperties # type: ignore from ._models import ServiceBusTopicOutputDataSource # type: ignore from ._models import ServiceBusTopicOutputDataSourceProperties # type: ignore + from ._models import Sku # type: ignore from ._models import StartStreamingJobParameters # type: ignore from ._models import StorageAccount # type: ignore from ._models import StreamInputDataSource # type: ignore from ._models import StreamInputProperties # type: ignore from ._models import StreamingJob # type: ignore from ._models import StreamingJobListResult # type: ignore - from ._models import StreamingJobSku # type: ignore from ._models import SubResource # type: ignore from ._models import SubscriptionQuota # type: ignore from ._models import SubscriptionQuotasListResult # type: ignore @@ -240,6 +218,7 @@ ClusterProvisioningState, ClusterSkuName, CompatibilityLevel, + CompressionType, ContentStoragePolicy, Encoding, EventSerializationType, @@ -249,30 +228,23 @@ JsonOutputSerializationFormat, OutputErrorPolicy, OutputStartMode, - StreamingJobSkuName, + RefreshType, + SkuName, ) __all__ = [ - 'AggregateFunctionProperties', 'AvroSerialization', 'AzureDataLakeStoreOutputDataSource', 'AzureDataLakeStoreOutputDataSourceProperties', - 'AzureFunctionOutputDataSource', - 'AzureMachineLearningServiceFunctionBinding', - 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', - 'AzureMachineLearningServiceInputColumn', - 'AzureMachineLearningServiceInputs', - 'AzureMachineLearningServiceOutputColumn', - 'AzureMachineLearningStudioFunctionBinding', - 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', - 'AzureMachineLearningStudioInputColumn', - 'AzureMachineLearningStudioInputs', - 'AzureMachineLearningStudioOutputColumn', + 'AzureMachineLearningWebServiceFunctionBinding', + 'AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters', + 'AzureMachineLearningWebServiceInputColumn', + 'AzureMachineLearningWebServiceInputs', + 'AzureMachineLearningWebServiceOutputColumn', 'AzureSqlDatabaseDataSourceProperties', 'AzureSqlDatabaseOutputDataSource', 'AzureSqlDatabaseOutputDataSourceProperties', 'AzureSqlReferenceInputDataSource', - 'AzureSqlReferenceInputDataSourceProperties', 'AzureSynapseDataSourceProperties', 'AzureSynapseOutputDataSource', 'AzureSynapseOutputDataSourceProperties', @@ -284,8 +256,6 @@ 
'BlobReferenceInputDataSourceProperties', 'BlobStreamInputDataSource', 'BlobStreamInputDataSourceProperties', - 'CSharpFunctionBinding', - 'CSharpFunctionRetrieveDefaultDefinitionParameters', 'Cluster', 'ClusterInfo', 'ClusterJob', @@ -295,13 +265,12 @@ 'ClusterSku', 'Compression', 'CsvSerialization', - 'CustomClrSerialization', 'DiagnosticCondition', 'Diagnostics', 'DocumentDbOutputDataSource', 'Error', - 'ErrorAutoGenerated', 'ErrorDetails', + 'ErrorError', 'ErrorResponse', 'EventHubDataSourceProperties', 'EventHubOutputDataSource', @@ -310,7 +279,6 @@ 'EventHubStreamInputDataSourceProperties', 'EventHubV2OutputDataSource', 'EventHubV2StreamInputDataSource', - 'External', 'Function', 'FunctionBinding', 'FunctionInput', @@ -348,19 +316,20 @@ 'Resource', 'ResourceTestStatus', 'ScalarFunctionProperties', + 'ScaleStreamingJobParameters', 'Serialization', 'ServiceBusDataSourceProperties', 'ServiceBusQueueOutputDataSource', 'ServiceBusQueueOutputDataSourceProperties', 'ServiceBusTopicOutputDataSource', 'ServiceBusTopicOutputDataSourceProperties', + 'Sku', 'StartStreamingJobParameters', 'StorageAccount', 'StreamInputDataSource', 'StreamInputProperties', 'StreamingJob', 'StreamingJobListResult', - 'StreamingJobSku', 'SubResource', 'SubscriptionQuota', 'SubscriptionQuotasListResult', @@ -370,6 +339,7 @@ 'ClusterProvisioningState', 'ClusterSkuName', 'CompatibilityLevel', + 'CompressionType', 'ContentStoragePolicy', 'Encoding', 'EventSerializationType', @@ -379,5 +349,6 @@ 'JsonOutputSerializationFormat', 'OutputErrorPolicy', 'OutputStartMode', - 'StreamingJobSkuName', + 'RefreshType', + 'SkuName', ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py index 100ff571855a..a0856adb0dbc 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models.py @@ -10,114 +10,17 @@ import msrest.serialization -class FunctionProperties(msrest.serialization.Model): - """The properties that are associated with a function. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AggregateFunctionProperties, ScalarFunctionProperties. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str - :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. - :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. 
- :type binding: ~stream_analytics_management_client.models.FunctionBinding - """ - - _validation = { - 'type': {'required': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, - 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, - 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, - } - - _subtype_map = { - 'type': {'Aggregate': 'AggregateFunctionProperties', 'Scalar': 'ScalarFunctionProperties'} - } - - def __init__( - self, - **kwargs - ): - super(FunctionProperties, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.etag = None - self.inputs = kwargs.get('inputs', None) - self.output = kwargs.get('output', None) - self.binding = kwargs.get('binding', None) - - -class AggregateFunctionProperties(FunctionProperties): - """The properties that are associated with an aggregate function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str - :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. - :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. - :type binding: ~stream_analytics_management_client.models.FunctionBinding - """ - - _validation = { - 'type': {'required': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, - 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, - 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, - } - - def __init__( - self, - **kwargs - ): - super(AggregateFunctionProperties, self).__init__(**kwargs) - self.type = 'Aggregate' # type: str - - class Serialization(msrest.serialization.Model): """Describes how data from an input is serialized or how data is serialized when written to an output. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSerialization, CsvSerialization, CustomClrSerialization, JsonSerialization, ParquetSerialization. + sub-classes are: AvroSerialization, CsvSerialization, JsonSerialization, ParquetSerialization. All required parameters must be populated in order to send to Azure. :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". 
:type type: str or ~stream_analytics_management_client.models.EventSerializationType """ @@ -130,7 +33,7 @@ class Serialization(msrest.serialization.Model): } _subtype_map = { - 'type': {'Avro': 'AvroSerialization', 'Csv': 'CsvSerialization', 'CustomClr': 'CustomClrSerialization', 'Json': 'JsonSerialization', 'Parquet': 'ParquetSerialization'} + 'type': {'Avro': 'AvroSerialization', 'Csv': 'CsvSerialization', 'Json': 'JsonSerialization', 'Parquet': 'ParquetSerialization'} } def __init__( @@ -148,11 +51,11 @@ class AvroSerialization(Serialization): :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param properties: The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests. - :type properties: object + :type properties: any """ _validation = { @@ -177,7 +80,7 @@ class OutputDataSource(msrest.serialization.Model): """Describes the data source that output will be written to. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. + sub-classes are: AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. All required parameters must be populated in order to send to Azure. 
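Both polymorphic bases in this file shrink: Serialization drops the CustomClr discriminator here, and OutputDataSource drops Microsoft.AzureFunction in the next hunk. A short sketch of how the remaining concrete subclasses fill the constant type discriminator that the _subtype_map keys on — the property values are illustrative assumptions:

```python
from azure.mgmt.streamanalytics.models import CsvSerialization, JsonSerialization

# Each concrete subclass sets its own constant discriminator in __init__,
# which is what the base class _subtype_map resolves during deserialization.
csv = CsvSerialization(field_delimiter=",", encoding="UTF8")
json_lines = JsonSerialization(encoding="UTF8", format="LineSeparated")

print(csv.type)         # Csv
print(json_lines.type)  # Json
```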
@@ -195,7 +98,7 @@ class OutputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} + 'type': {'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} } def __init__( @@ -382,61 +285,11 @@ def __init__( self.authentication_mode = kwargs.get('authentication_mode', None) -class AzureFunctionOutputDataSource(OutputDataSource): - """Defines the metadata of AzureFunctionOutputDataSource. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param function_app_name: The name of your Azure Functions app. - :type function_app_name: str - :param function_name: The name of the function in your Azure Functions app. - :type function_name: str - :param api_key: If you want to use an Azure Function from another subscription, you can do so - by providing the key to access your function. - :type api_key: str - :param max_batch_size: A property that lets you set the maximum size for each output batch - that's sent to your Azure function. The input unit is in bytes. By default, this value is - 262,144 bytes (256 KB). - :type max_batch_size: float - :param max_batch_count: A property that lets you specify the maximum number of events in each - batch that's sent to Azure Functions. The default value is 100. 
- :type max_batch_count: float - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'function_app_name': {'key': 'properties.functionAppName', 'type': 'str'}, - 'function_name': {'key': 'properties.functionName', 'type': 'str'}, - 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'max_batch_size': {'key': 'properties.maxBatchSize', 'type': 'float'}, - 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureFunctionOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.AzureFunction' # type: str - self.function_app_name = kwargs.get('function_app_name', None) - self.function_name = kwargs.get('function_name', None) - self.api_key = kwargs.get('api_key', None) - self.max_batch_size = kwargs.get('max_batch_size', None) - self.max_batch_count = kwargs.get('max_batch_count', None) - - class FunctionBinding(msrest.serialization.Model): """The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMachineLearningStudioFunctionBinding, AzureMachineLearningServiceFunctionBinding, CSharpFunctionBinding, JavaScriptFunctionBinding. + sub-classes are: AzureMachineLearningWebServiceFunctionBinding, JavaScriptFunctionBinding. All required parameters must be populated in order to send to Azure. @@ -453,7 +306,7 @@ class FunctionBinding(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionBinding', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionBinding', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding'} + 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningWebServiceFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding'} } def __init__( @@ -464,7 +317,7 @@ def __init__( self.type = None # type: Optional[str] -class AzureMachineLearningServiceFunctionBinding(FunctionBinding): +class AzureMachineLearningWebServiceFunctionBinding(FunctionBinding): """The binding to an Azure Machine Learning web service. All required parameters must be populated in order to send to Azure. @@ -472,23 +325,20 @@ class AzureMachineLearningServiceFunctionBinding(FunctionBinding): :param type: Required. Indicates the function binding type.Constant filled by server. :type type: str :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning web - service. + service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :type endpoint: str :param api_key: The API key used to authenticate with Request-Response endpoint. :type api_key: str :param inputs: The inputs for the Azure Machine Learning web service endpoint. - :type inputs: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] + :type inputs: ~stream_analytics_management_client.models.AzureMachineLearningWebServiceInputs :param outputs: A list of outputs from the Azure Machine Learning web service endpoint execution. 
:type outputs: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceOutputColumn] + list[~stream_analytics_management_client.models.AzureMachineLearningWebServiceOutputColumn] :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000. :type batch_size: int - :param number_of_parallel_requests: The number of parallel requests that will be sent per - partition of your job to the machine learning service. Default is 1. - :type number_of_parallel_requests: int """ _validation = { @@ -499,31 +349,29 @@ class AzureMachineLearningServiceFunctionBinding(FunctionBinding): 'type': {'key': 'type', 'type': 'str'}, 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[AzureMachineLearningServiceInputColumn]'}, - 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningServiceOutputColumn]'}, + 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningWebServiceInputs'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningWebServiceOutputColumn]'}, 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, - 'number_of_parallel_requests': {'key': 'properties.numberOfParallelRequests', 'type': 'int'}, } def __init__( self, **kwargs ): - super(AzureMachineLearningServiceFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.MachineLearningServices' # type: str + super(AzureMachineLearningWebServiceFunctionBinding, self).__init__(**kwargs) + self.type = 'Microsoft.MachineLearning/WebService' # type: str self.endpoint = kwargs.get('endpoint', None) self.api_key = kwargs.get('api_key', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.batch_size = kwargs.get('batch_size', None) - self.number_of_parallel_requests = kwargs.get('number_of_parallel_requests', None) class FunctionRetrieveDefaultDefinitionParameters(msrest.serialization.Model): """Parameters used to specify the type of function to retrieve the default definition for. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, CSharpFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters. + sub-classes are: AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters. All required parameters must be populated in order to send to Azure. 
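As the attribute map above shows, the renamed binding now carries the Microsoft.MachineLearning/WebService discriminator and takes a single AzureMachineLearningWebServiceInputs object rather than a bare column list. An illustrative construction — the endpoint, key, and column names are placeholders:

```python
from azure.mgmt.streamanalytics.models import (
    AzureMachineLearningWebServiceFunctionBinding,
    AzureMachineLearningWebServiceInputColumn,
    AzureMachineLearningWebServiceInputs,
    AzureMachineLearningWebServiceOutputColumn,
)

binding = AzureMachineLearningWebServiceFunctionBinding(
    endpoint="https://<region>.services.azureml.net/<workspace>/execute",  # placeholder RRS endpoint
    api_key="<api-key>",  # placeholder
    inputs=AzureMachineLearningWebServiceInputs(
        name="input1",
        column_names=[
            AzureMachineLearningWebServiceInputColumn(name="tweet", data_type="string", map_to=0),
        ],
    ),
    outputs=[
        AzureMachineLearningWebServiceOutputColumn(name="Sentiment", data_type="string"),
    ],
    batch_size=1000,
)

# The constructor fills the constant discriminator from the _subtype_map entry.
assert binding.type == "Microsoft.MachineLearning/WebService"
```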
@@ -540,7 +388,7 @@ class FunctionRetrieveDefaultDefinitionParameters(msrest.serialization.Model): } _subtype_map = { - 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters'} + 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters'} } def __init__( @@ -551,25 +399,24 @@ def __init__( self.binding_type = None # type: Optional[str] -class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): +class AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. :type binding_type: str :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - web service. + web service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -578,23 +425,24 @@ class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(Fun 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, **kwargs ): - super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.MachineLearningServices' # type: str + super(AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str self.execute_endpoint = kwargs.get('execute_endpoint', None) + self.udf_type = kwargs.get('udf_type', None) -class AzureMachineLearningServiceInputColumn(msrest.serialization.Model): +class AzureMachineLearningWebServiceInputColumn(msrest.serialization.Model): """Describes an input column for the Azure Machine Learning web service endpoint. :param name: The name of the input column. :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the input column. + :param data_type: The (Azure Machine Learning supported) data type of the input column. A list + of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . 
:type data_type: str :param map_to: The zero based index of the function parameter this input maps to. :type map_to: int @@ -610,13 +458,13 @@ def __init__( self, **kwargs ): - super(AzureMachineLearningServiceInputColumn, self).__init__(**kwargs) + super(AzureMachineLearningWebServiceInputColumn, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.data_type = kwargs.get('data_type', None) self.map_to = kwargs.get('map_to', None) -class AzureMachineLearningServiceInputs(msrest.serialization.Model): +class AzureMachineLearningWebServiceInputs(msrest.serialization.Model): """The inputs for the Azure Machine Learning web service endpoint. :param name: The name of the input. This is the name provided while authoring the endpoint. @@ -624,199 +472,31 @@ class AzureMachineLearningServiceInputs(msrest.serialization.Model): :param column_names: A list of input columns for the Azure Machine Learning web service endpoint. :type column_names: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] + list[~stream_analytics_management_client.models.AzureMachineLearningWebServiceInputColumn] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, - 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningServiceInputColumn]'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningWebServiceInputColumn]'}, } def __init__( self, **kwargs ): - super(AzureMachineLearningServiceInputs, self).__init__(**kwargs) + super(AzureMachineLearningWebServiceInputs, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.column_names = kwargs.get('column_names', None) -class AzureMachineLearningServiceOutputColumn(msrest.serialization.Model): +class AzureMachineLearningWebServiceOutputColumn(msrest.serialization.Model): """Describes an output column for the Azure Machine Learning web service endpoint. - :param name: The name of the output column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the output column. - :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. - :type map_to: int - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'map_to': {'key': 'mapTo', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningServiceOutputColumn, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.data_type = kwargs.get('data_type', None) - self.map_to = kwargs.get('map_to', None) - - -class AzureMachineLearningStudioFunctionBinding(FunctionBinding): - """The binding to an Azure Machine Learning Studio. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning- - consume-web-services#request-response-service-rrs. - :type endpoint: str - :param api_key: The API key used to authenticate with Request-Response endpoint. - :type api_key: str - :param inputs: The inputs for the Azure Machine Learning Studio endpoint. - :type inputs: ~stream_analytics_management_client.models.AzureMachineLearningStudioInputs - :param outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution. 
- :type outputs: - list[~stream_analytics_management_client.models.AzureMachineLearningStudioOutputColumn] - :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure - ML RRS execute request. Default is 1000. - :type batch_size: int - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, - 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningStudioInputs'}, - 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningStudioOutputColumn]'}, - 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningStudioFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.MachineLearning/WebService' # type: str - self.endpoint = kwargs.get('endpoint', None) - self.api_key = kwargs.get('api_key', None) - self.inputs = kwargs.get('inputs', None) - self.outputs = kwargs.get('outputs', None) - self.batch_size = kwargs.get('batch_size', None) - - -class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for an Azure Machine Learning Studio function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine- - learning-consume-web-services#request-response-service-rrs. - :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str - """ - - _validation = { - 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, - } - - _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, - 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, - } - - udf_type = "Scalar" - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str - self.execute_endpoint = kwargs.get('execute_endpoint', None) - - -class AzureMachineLearningStudioInputColumn(msrest.serialization.Model): - """Describes an input column for the Azure Machine Learning Studio endpoint. - - :param name: The name of the input column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the input column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . - :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. 
- :type map_to: int - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'map_to': {'key': 'mapTo', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningStudioInputColumn, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.data_type = kwargs.get('data_type', None) - self.map_to = kwargs.get('map_to', None) - - -class AzureMachineLearningStudioInputs(msrest.serialization.Model): - """The inputs for the Azure Machine Learning Studio endpoint. - - :param name: The name of the input. This is the name provided while authoring the endpoint. - :type name: str - :param column_names: A list of input columns for the Azure Machine Learning Studio endpoint. - :type column_names: - list[~stream_analytics_management_client.models.AzureMachineLearningStudioInputColumn] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningStudioInputColumn]'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMachineLearningStudioInputs, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.column_names = kwargs.get('column_names', None) - - -class AzureMachineLearningStudioOutputColumn(msrest.serialization.Model): - """Describes an output column for the Azure Machine Learning Studio endpoint. - :param name: The name of the output column. :type name: str :param data_type: The (Azure Machine Learning supported) data type of the output column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . + of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . :type data_type: str """ @@ -829,7 +509,7 @@ def __init__( self, **kwargs ): - super(AzureMachineLearningStudioOutputColumn, self).__init__(**kwargs) + super(AzureMachineLearningWebServiceOutputColumn, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.data_type = kwargs.get('data_type', None) @@ -855,8 +535,8 @@ class AzureSqlDatabaseDataSourceProperties(msrest.serialization.Model): :param max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. :type max_batch_count: float - :param max_writer_count: Max Write r count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. + :param max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query + partition) are available. Optional on PUT requests. :type max_writer_count: float :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". @@ -915,8 +595,8 @@ class AzureSqlDatabaseOutputDataSource(OutputDataSource): :param max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. :type max_batch_count: float - :param max_writer_count: Max Write r count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. + :param max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query + partition) are available. Optional on PUT requests. :type max_writer_count: float :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". 
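The hunks around here repair the "Max Write r count" typo and spell out the writer semantics: 1 means a single writer, 0 means parallelism derived from the query partitioning. A sketch of constructing the SQL output data source with those knobs — the connection values are placeholders:

```python
from azure.mgmt.streamanalytics.models import AzureSqlDatabaseOutputDataSource

sql_output = AzureSqlDatabaseOutputDataSource(
    server="my-sql-server",    # placeholder connection settings
    database="my-database",
    user="sqladmin",
    password="<password>",
    table="Events",
    max_batch_count=10000,     # default batch size per write
    max_writer_count=0,        # 0 = writers follow query partitioning; 1 = single writer
    authentication_mode="ConnectionString",
)

assert sql_output.type == "Microsoft.Sql/Server/Database"
```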
@@ -976,8 +656,8 @@ class AzureSqlDatabaseOutputDataSourceProperties(AzureSqlDatabaseDataSourcePrope :param max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. :type max_batch_count: float - :param max_writer_count: Max Write r count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. + :param max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query + partition) are available. Optional on PUT requests. :type max_writer_count: float :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". @@ -1043,32 +723,6 @@ class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): :param type: Required. Indicates the type of input data source containing reference data. Required on PUT (CreateOrReplace) requests.Constant filled by server. :type type: str - :param properties: - :type properties: - ~stream_analytics_management_client.models.AzureSqlReferenceInputDataSourceProperties - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'AzureSqlReferenceInputDataSourceProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Sql/Server/Database' # type: str - self.properties = kwargs.get('properties', None) - - -class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model): - """AzureSqlReferenceInputDataSourceProperties. - :param server: This element is associated with the datasource element. This is the name of the server that contains the database that will be written to. :type server: str @@ -1084,10 +738,9 @@ class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model): :param table: This element is associated with the datasource element. The name of the table in the Azure SQL database. :type table: str - :param refresh_type: This element is associated with the datasource element. This element is of - enum type. It indicates what kind of data refresh option do we want to - use:Static/RefreshPeriodicallyWithFull/RefreshPeriodicallyWithDelta. - :type refresh_type: str + :param refresh_type: Indicates the type of data refresh option. Possible values include: + "Static", "RefreshPeriodicallyWithFull", "RefreshPeriodicallyWithDelta". + :type refresh_type: str or ~stream_analytics_management_client.models.RefreshType :param refresh_rate: This element is associated with the datasource element. This indicates how frequently the data will be fetched from the database. It is of DateTime format.
:type refresh_rate: str @@ -1100,23 +753,29 @@ class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model): :type delta_snapshot_query: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'server': {'key': 'server', 'type': 'str'}, - 'database': {'key': 'database', 'type': 'str'}, - 'user': {'key': 'user', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'table': {'key': 'table', 'type': 'str'}, - 'refresh_type': {'key': 'refreshType', 'type': 'str'}, - 'refresh_rate': {'key': 'refreshRate', 'type': 'str'}, - 'full_snapshot_query': {'key': 'fullSnapshotQuery', 'type': 'str'}, - 'delta_snapshot_query': {'key': 'deltaSnapshotQuery', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'properties.server', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'user': {'key': 'properties.user', 'type': 'str'}, + 'password': {'key': 'properties.password', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'refresh_type': {'key': 'properties.refreshType', 'type': 'str'}, + 'refresh_rate': {'key': 'properties.refreshRate', 'type': 'str'}, + 'full_snapshot_query': {'key': 'properties.fullSnapshotQuery', 'type': 'str'}, + 'delta_snapshot_query': {'key': 'properties.deltaSnapshotQuery', 'type': 'str'}, } def __init__( self, **kwargs ): - super(AzureSqlReferenceInputDataSourceProperties, self).__init__(**kwargs) + super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Sql/Server/Database' # type: str self.server = kwargs.get('server', None) self.database = kwargs.get('database', None) self.user = kwargs.get('user', None) @@ -1327,10 +986,10 @@ class BlobDataSourceProperties(msrest.serialization.Model): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1377,10 +1036,10 @@ class BlobOutputDataSource(OutputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. 
Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1433,10 +1092,10 @@ class BlobOutputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1483,10 +1142,10 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1534,10 +1193,10 @@ class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1612,10 +1271,10 @@ class BlobStreamInputDataSource(StreamInputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. 
+ input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1668,10 +1327,10 @@ class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1702,7 +1361,7 @@ def __init__( class Resource(msrest.serialization.Model): - """Resource. + """The base resource definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -2037,102 +1696,25 @@ class Compression(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Compression, self).__init__(**kwargs) - self.type = kwargs['type'] - - -class CSharpFunctionBinding(FunctionBinding): - """The binding to a CSharp function. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param script: The Csharp code containing a single function definition. - :type script: str - :param dll_path: The Csharp code containing a single function definition. - :type dll_path: str - :param class_property: The Csharp code containing a single function definition. - :type class_property: str - :param method: The Csharp code containing a single function definition. 
- :type method: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'script': {'key': 'properties.script', 'type': 'str'}, - 'dll_path': {'key': 'properties.dllPath', 'type': 'str'}, - 'class_property': {'key': 'properties.class', 'type': 'str'}, - 'method': {'key': 'properties.method', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CSharpFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str - self.script = kwargs.get('script', None) - self.dll_path = kwargs.get('dll_path', None) - self.class_property = kwargs.get('class_property', None) - self.method = kwargs.get('method', None) - - -class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for a CSharp function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param script: The CSharp code containing a single function definition. - :type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param type: Required. Indicates the type of compression that the input uses. Required on PUT + (CreateOrReplace) requests. Possible values include: "None", "GZip", "Deflate". + :type type: str or ~stream_analytics_management_client.models.CompressionType """ _validation = { - 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, + 'type': {'required': True}, } _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'}, - 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, **kwargs ): - super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str - self.script = kwargs.get('script', None) + super(Compression, self).__init__(**kwargs) + self.type = kwargs['type'] class CsvSerialization(Serialization): @@ -2142,12 +1724,13 @@ class CsvSerialization(Serialization): :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param field_delimiter: Specifies the delimiter that will be used to separate comma-separated - value (CSV) records. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream- - analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics- - output for a list of supported values. Required on PUT (CreateOrReplace) requests. + value (CSV) records. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list of + supported values. Required on PUT (CreateOrReplace) requests. 
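Editor's note: with the CSharp binding models removed, Compression keeps only its required type, now documented against the CompressionType enum. A short sketch, assuming the plain string form of the enum value (the generated mgmt enums accept strings interchangeably):

    from azure.mgmt.streamanalytics.models import Compression

    # "None", "GZip", and "Deflate" are the documented CompressionType values.
    compression = Compression(type="GZip")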
:type field_delimiter: str :param encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. @@ -2175,41 +1758,6 @@ def __init__( self.encoding = kwargs.get('encoding', None) -class CustomClrSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in custom format. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param serialization_dll_path: The serialization library path. - :type serialization_dll_path: str - :param serialization_class_name: The serialization class name. - :type serialization_class_name: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'serialization_dll_path': {'key': 'properties.serializationDllPath', 'type': 'str'}, - 'serialization_class_name': {'key': 'properties.serializationClassName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CustomClrSerialization, self).__init__(**kwargs) - self.type = 'CustomClr' # type: str - self.serialization_dll_path = kwargs.get('serialization_dll_path', None) - self.serialization_class_name = kwargs.get('serialization_class_name', None) - - class DiagnosticCondition(msrest.serialization.Model): """Condition applicable to the resource, or to the job overall, that warrant customer attention. @@ -2292,9 +1840,9 @@ class DocumentDbOutputDataSource(OutputDataSource): :type database: str :param collection_name_pattern: The collection name pattern for the collections to be used. The collection name format can be constructed using the optional {partition} token, where - partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT - (CreateOrReplace) requests. + partitions start from 0. See the DocumentDB section of + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for more + information. Required on PUT (CreateOrReplace) requests. :type collection_name_pattern: str :param partition_key: The name of the field in output events used to specify the key for partitioning output across collections. If 'collectionNamePattern' contains the {partition} @@ -2337,11 +1885,11 @@ class Error(msrest.serialization.Model): """Common error representation. :param error: Error definition properties. - :type error: ~stream_analytics_management_client.models.ErrorAutoGenerated + :type error: ~stream_analytics_management_client.models.ErrorError """ _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorAutoGenerated'}, + 'error': {'key': 'error', 'type': 'ErrorError'}, } def __init__( @@ -2352,62 +1900,62 @@ def __init__( self.error = kwargs.get('error', None) -class ErrorAutoGenerated(msrest.serialization.Model): - """Error definition properties. +class ErrorDetails(msrest.serialization.Model): + """Common error details representation. :param code: Error code. :type code: str - :param message: Error message. - :type message: str :param target: Error target. 
:type target: str - :param details: Error details. - :type details: list[~stream_analytics_management_client.models.ErrorDetails] + :param message: Error message. + :type message: str """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetails]'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs ): - super(ErrorAutoGenerated, self).__init__(**kwargs) + super(ErrorDetails, self).__init__(**kwargs) self.code = kwargs.get('code', None) - self.message = kwargs.get('message', None) self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) + self.message = kwargs.get('message', None) -class ErrorDetails(msrest.serialization.Model): - """Common error details representation. +class ErrorError(msrest.serialization.Model): + """Error definition properties. :param code: Error code. :type code: str - :param target: Error target. - :type target: str :param message: Error message. :type message: str + :param target: Error target. + :type target: str + :param details: Error details. + :type details: list[~stream_analytics_management_client.models.ErrorDetails] """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetails]'}, } def __init__( self, **kwargs ): - super(ErrorDetails, self).__init__(**kwargs) + super(ErrorError, self).__init__(**kwargs) self.code = kwargs.get('code', None) - self.target = kwargs.get('target', None) self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) class ErrorResponse(msrest.serialization.Model): @@ -2535,7 +2083,7 @@ class EventHubOutputDataSource(OutputDataSource): :param partition_key: The key/column that is used to determine to which partition to send event data. :type partition_key: str - :param property_columns: + :param property_columns: The properties associated with this Event Hub output. :type property_columns: list[str] """ @@ -2589,7 +2137,7 @@ class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): :param partition_key: The key/column that is used to determine to which partition to send event data. :type partition_key: str - :param property_columns: + :param property_columns: The properties associated with this Event Hub output. :type property_columns: list[str] """ @@ -2735,7 +2283,7 @@ class EventHubV2OutputDataSource(OutputDataSource): :param partition_key: The key/column that is used to determine to which partition to send event data. :type partition_key: str - :param property_columns: + :param property_columns: The properties associated with this Event Hub output. :type property_columns: list[str] """ @@ -2826,33 +2374,6 @@ def __init__( self.consumer_group_name = kwargs.get('consumer_group_name', None) -class External(msrest.serialization.Model): - """The storage account where the custom code artifacts are located. - - :param storage_account: The properties that are associated with an Azure Storage account. 
- :type storage_account: ~stream_analytics_management_client.models.StorageAccount - :param container: - :type container: str - :param path: - :type path: str - """ - - _attribute_map = { - 'storage_account': {'key': 'storageAccount', 'type': 'StorageAccount'}, - 'container': {'key': 'container', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(External, self).__init__(**kwargs) - self.storage_account = kwargs.get('storage_account', None) - self.container = kwargs.get('container', None) - self.path = kwargs.get('path', None) - - class SubResource(msrest.serialization.Model): """The base sub-resource model definition. @@ -2982,8 +2503,8 @@ class FunctionOutput(msrest.serialization.Model): """Describes the output of a function. :param data_type: The (Azure Stream Analytics supported) data type of the function output. A - list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn835065.aspx. + list of valid Azure Stream Analytics data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. :type data_type: str """ @@ -2999,14 +2520,55 @@ def __init__( self.data_type = kwargs.get('data_type', None) +class FunctionProperties(msrest.serialization.Model): + """The properties that are associated with a function. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ScalarFunctionProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of function.Constant filled by server. + :type type: str + :ivar etag: The current entity tag for the function. This is an opaque string. You can use it + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + """ + + _validation = { + 'type': {'required': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Scalar': 'ScalarFunctionProperties'} + } + + def __init__( + self, + **kwargs + ): + super(FunctionProperties, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.etag = None + + class Identity(msrest.serialization.Model): """Describes how identity is verified. - :param tenant_id: + :param tenant_id: The identity tenantId. :type tenant_id: str - :param principal_id: + :param principal_id: The identity principal ID. :type principal_id: str - :param type: + :param type: The identity type. :type type: str """ @@ -3239,8 +2801,6 @@ def __init__( class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for a JavaScript function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. @@ -3248,13 +2808,13 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa :param script: The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'. 
:type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -3263,8 +2823,6 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, **kwargs @@ -3272,6 +2830,7 @@ def __init__( super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str self.script = kwargs.get('script', None) + self.udf_type = kwargs.get('udf_type', None) class StorageAccount(msrest.serialization.Model): @@ -3334,7 +2893,7 @@ class JsonSerialization(Serialization): :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. @@ -3376,6 +2935,8 @@ class Operation(msrest.serialization.Model): :ivar name: The name of the operation being performed on this particular object. :vartype name: str + :param is_data_action: Indicates whether the operation is a data action. + :type is_data_action: bool :ivar display: Contains the localized display information for this particular operation / action. :vartype display: ~stream_analytics_management_client.models.OperationDisplay @@ -3388,6 +2949,7 @@ class Operation(msrest.serialization.Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, } @@ -3397,6 +2959,7 @@ def __init__( ): super(Operation, self).__init__(**kwargs) self.name = None + self.is_data_action = kwargs.get('is_data_action', None) self.display = None @@ -3486,9 +3049,9 @@ class Output(SubResource): :param datasource: Describes the data source that output will be written to. Required on PUT (CreateOrReplace) requests. :type datasource: ~stream_analytics_management_client.models.OutputDataSource - :param time_window: + :param time_window: The time frame for filtering Stream Analytics job outputs. :type time_window: str - :param size_window: + :param size_window: The size window to constrain a Stream Analytics output to. :type size_window: float :param serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. @@ -3571,11 +3134,11 @@ class ParquetSerialization(Serialization): :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". 
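Editor's note: since udf_type is no longer a server-filled constant, callers may now pass it explicitly when retrieving a default JavaScript function definition. A minimal sketch; per the updated docstring, only None and "Scalar" are acceptable:

    from azure.mgmt.streamanalytics.models import (
        JavaScriptFunctionRetrieveDefaultDefinitionParameters,
    )

    params = JavaScriptFunctionRetrieveDefaultDefinitionParameters(
        script="function (x, y) { return x + y; }",
        udf_type="Scalar",  # optional; defaults to None
    )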
:type type: str or ~stream_analytics_management_client.models.EventSerializationType :param properties: The properties that are associated with the Parquet serialization type. Required on PUT (CreateOrReplace) requests. - :type properties: object + :type properties: any """ _validation = { @@ -3722,7 +3285,41 @@ def __init__( self.authentication_mode = kwargs.get('authentication_mode', None) -class PrivateEndpoint(Resource): +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class PrivateEndpoint(ProxyResource): """Complete information about the private endpoint. Variables are only populated by the server, and will be ignored when sending a request. @@ -3868,21 +3465,27 @@ def __init__( class PrivateLinkServiceConnection(msrest.serialization.Model): """A grouping of information about the connection to the remote resource. + Variables are only populated by the server, and will be ignored when sending a request. + :param private_link_service_id: The resource id of the private link service. Required on PUT (CreateOrUpdate) requests. :type private_link_service_id: str :param group_ids: The ID(s) of the group(s) obtained from the remote resource that this private endpoint should connect to. Required on PUT (CreateOrUpdate) requests. :type group_ids: list[str] - :param request_message: A message passed to the owner of the remote resource with this + :ivar request_message: A message passed to the owner of the remote resource with this connection request. Restricted to 140 chars. - :type request_message: str + :vartype request_message: str :param private_link_service_connection_state: A collection of read-only information about the state of the connection to the private remote resource. :type private_link_service_connection_state: ~stream_analytics_management_client.models.PrivateLinkConnectionState """ + _validation = { + 'request_message': {'readonly': True}, + } + _attribute_map = { 'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'}, 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'}, @@ -3897,44 +3500,10 @@ def __init__( super(PrivateLinkServiceConnection, self).__init__(**kwargs) self.private_link_service_id = kwargs.get('private_link_service_id', None) self.group_ids = kwargs.get('group_ids', None) - self.request_message = kwargs.get('request_message', None) + self.request_message = None self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) -class ProxyResource(Resource): - """The resource model definition for a ARM proxy resource. 
It will have everything other than required location and tags. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ProxyResource, self).__init__(**kwargs) - - class ReferenceInputProperties(InputProperties): """The properties that are associated with an input containing reference data. @@ -4030,12 +3599,12 @@ class ScalarFunctionProperties(FunctionProperties): :param type: Required. Indicates the type of function.Constant filled by server. :type type: str :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :param inputs: + :param inputs: A list of inputs describing the parameters of the function. :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. + :param output: The output of the function. :type output: ~stream_analytics_management_client.models.FunctionOutput :param binding: The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. @@ -4061,6 +3630,29 @@ def __init__( ): super(ScalarFunctionProperties, self).__init__(**kwargs) self.type = 'Scalar' # type: str + self.inputs = kwargs.get('inputs', None) + self.output = kwargs.get('output', None) + self.binding = kwargs.get('binding', None) + + +class ScaleStreamingJobParameters(msrest.serialization.Model): + """Parameters supplied to the Scale Streaming Job operation. + + :param streaming_units: Specifies the number of streaming units that the streaming job will + scale to. + :type streaming_units: int + """ + + _attribute_map = { + 'streaming_units': {'key': 'streamingUnits', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(ScaleStreamingJobParameters, self).__init__(**kwargs) + self.streaming_units = kwargs.get('streaming_units', None) class ServiceBusQueueOutputDataSource(OutputDataSource): @@ -4089,8 +3681,11 @@ class ServiceBusQueueOutputDataSource(OutputDataSource): :param property_columns: A string array of the names of output columns to be attached to Service Bus messages as custom properties. :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. - :type system_property_columns: dict[str, str] + :param system_property_columns: The system properties associated with the Service Bus Queue. 
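Editor's note: ScaleStreamingJobParameters is newly added in this hunk. A sketch of building it; the begin_scale operation name is an assumption based on track-2 naming conventions and is not shown in this diff:

    from azure.mgmt.streamanalytics.models import ScaleStreamingJobParameters

    scale_params = ScaleStreamingJobParameters(streaming_units=6)
    # Assumed operation name, for illustration only:
    # poller = client.streaming_jobs.begin_scale("my-rg", "my-job", scale_params)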
+ The following system properties are supported: ReplyToSessionId, ContentType, To, Subject, + CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, + Label, ScheduledEnqueueTimeUtc. + :type system_property_columns: any """ _validation = { @@ -4105,7 +3700,7 @@ class ServiceBusQueueOutputDataSource(OutputDataSource): 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, 'queue_name': {'key': 'properties.queueName', 'type': 'str'}, 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, + 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': 'object'}, } def __init__( @@ -4144,8 +3739,11 @@ class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): :param property_columns: A string array of the names of output columns to be attached to Service Bus messages as custom properties. :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. - :type system_property_columns: dict[str, str] + :param system_property_columns: The system properties associated with the Service Bus Queue. + The following system properties are supported: ReplyToSessionId, ContentType, To, Subject, + CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, + Label, ScheduledEnqueueTimeUtc. + :type system_property_columns: any """ _attribute_map = { @@ -4155,7 +3753,7 @@ class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, 'queue_name': {'key': 'queueName', 'type': 'str'}, 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'systemPropertyColumns', 'type': '{str}'}, + 'system_property_columns': {'key': 'systemPropertyColumns', 'type': 'object'}, } def __init__( @@ -4194,7 +3792,10 @@ class ServiceBusTopicOutputDataSource(OutputDataSource): :param property_columns: A string array of the names of output columns to be attached to Service Bus messages as custom properties. :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. + :param system_property_columns: The system properties associated with the Service Bus Topic + Output. The following system properties are supported: ReplyToSessionId, ContentType, To, + Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, + ReplyTo, Label, ScheduledEnqueueTimeUtc. :type system_property_columns: dict[str, str] """ @@ -4249,7 +3850,10 @@ class ServiceBusTopicOutputDataSourceProperties(ServiceBusDataSourceProperties): :param property_columns: A string array of the names of output columns to be attached to Service Bus messages as custom properties. :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. + :param system_property_columns: The system properties associated with the Service Bus Topic + Output. The following system properties are supported: ReplyToSessionId, ContentType, To, + Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, + ReplyTo, Label, ScheduledEnqueueTimeUtc. 
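Editor's note: system_property_columns on the Service Bus Queue output is now loosely typed ('object'/any) and its docstring lists the supported system properties. A sketch, assuming the inherited Service Bus fields keep their usual names and that a system-property-to-column mapping remains a valid shape:

    from azure.mgmt.streamanalytics.models import ServiceBusQueueOutputDataSource

    queue_output = ServiceBusQueueOutputDataSource(
        service_bus_namespace="example-ns",              # placeholder namespace
        shared_access_policy_name="RootManageSharedAccessKey",
        shared_access_policy_key="<key>",
        queue_name="output-queue",
        property_columns=["deviceId"],
        system_property_columns={"MessageId": "messageIdColumn"},  # assumed mapping shape
    )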
:type system_property_columns: dict[str, str] """ @@ -4273,6 +3877,26 @@ def __init__( self.system_property_columns = kwargs.get('system_property_columns', None) +class Sku(msrest.serialization.Model): + """The properties that are associated with a SKU. + + :param name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values + include: "Standard". + :type name: str or ~stream_analytics_management_client.models.SkuName + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + class StartStreamingJobParameters(msrest.serialization.Model): """Parameters supplied to the Start Streaming Job operation. @@ -4324,7 +3948,7 @@ class StreamingJob(TrackedResource): be used to authenticate with inputs and outputs. :type identity: ~stream_analytics_management_client.models.Identity :param sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. - :type sku: ~stream_analytics_management_client.models.StreamingJobSku + :type sku: ~stream_analytics_management_client.models.Sku :ivar job_id: A GUID uniquely identifying the streaming job. This GUID is generated upon creation of the streaming job. :vartype job_id: str @@ -4369,12 +3993,12 @@ class StreamingJob(TrackedResource): have a value of -1. :type events_late_arrival_max_delay_in_seconds: int :param data_locale: The data locale of the stream analytics job. Value should be the name of a - supported .NET Culture from the set https://msdn.microsoft.com/en- - us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none - specified. + supported .NET Culture from the set + https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. + Defaults to 'en-US' if none specified. :type data_locale: str :param compatibility_level: Controls certain runtime behaviors of the streaming job. Possible - values include: "1.0". + values include: "1.0", "1.2". :type compatibility_level: str or ~stream_analytics_management_client.models.CompatibilityLevel :ivar created_date: Value is an ISO-8601 formatted UTC timestamp indicating when the streaming job was created. @@ -4398,19 +4022,17 @@ class StreamingJob(TrackedResource): transformation. :type functions: list[~stream_analytics_management_client.models.Function] :ivar etag: The current entity tag for the streaming job. This is an opaque string. You can use - it to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + it to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str :param job_storage_account: The properties that are associated with an Azure Storage account with MSI. :type job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount - :ivar content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to + :param content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this requires the user to also specify jobStorageAccount property. . Possible values include: "SystemAccount", "JobStorageAccount". 
- :vartype content_storage_policy: str or + :type content_storage_policy: str or ~stream_analytics_management_client.models.ContentStoragePolicy - :param externals: The storage account where the custom code artifacts are located. - :type externals: ~stream_analytics_management_client.models.External :param cluster: The cluster which streaming jobs will run on. :type cluster: ~stream_analytics_management_client.models.ClusterInfo """ @@ -4425,7 +4047,6 @@ class StreamingJob(TrackedResource): 'last_output_event_time': {'readonly': True}, 'created_date': {'readonly': True}, 'etag': {'readonly': True}, - 'content_storage_policy': {'readonly': True}, } _attribute_map = { @@ -4435,7 +4056,7 @@ class StreamingJob(TrackedResource): 'tags': {'key': 'tags', 'type': '{str}'}, 'location': {'key': 'location', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'Identity'}, - 'sku': {'key': 'properties.sku', 'type': 'StreamingJobSku'}, + 'sku': {'key': 'properties.sku', 'type': 'Sku'}, 'job_id': {'key': 'properties.jobId', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'job_state': {'key': 'properties.jobState', 'type': 'str'}, @@ -4457,7 +4078,6 @@ class StreamingJob(TrackedResource): 'etag': {'key': 'properties.etag', 'type': 'str'}, 'job_storage_account': {'key': 'properties.jobStorageAccount', 'type': 'JobStorageAccount'}, 'content_storage_policy': {'key': 'properties.contentStoragePolicy', 'type': 'str'}, - 'externals': {'key': 'properties.externals', 'type': 'External'}, 'cluster': {'key': 'properties.cluster', 'type': 'ClusterInfo'}, } @@ -4488,8 +4108,7 @@ def __init__( self.functions = kwargs.get('functions', None) self.etag = None self.job_storage_account = kwargs.get('job_storage_account', None) - self.content_storage_policy = None - self.externals = kwargs.get('externals', None) + self.content_storage_policy = kwargs.get('content_storage_policy', None) self.cluster = kwargs.get('cluster', None) @@ -4523,26 +4142,6 @@ def __init__( self.next_link = None -class StreamingJobSku(msrest.serialization.Model): - """The properties that are associated with a SKU. - - :param name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values - include: "Standard". - :type name: str or ~stream_analytics_management_client.models.StreamingJobSkuName - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(StreamingJobSku, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - class StreamInputProperties(InputProperties): """The properties that are associated with an input containing stream data. @@ -4677,6 +4276,8 @@ class Transformation(SubResource): :vartype type: str :param streaming_units: Specifies the number of streaming units that the streaming job uses. :type streaming_units: int + :param valid_streaming_units: Specifies the valid streaming units a streaming job can scale to. + :type valid_streaming_units: list[int] :param query: Specifies the query that will be run in the streaming job. You can learn more about the Stream Analytics Query Language (SAQL) here: https://msdn.microsoft.com/library/azure/dn834998 . Required on PUT (CreateOrReplace) requests. 
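Editor's note: three StreamingJob changes land together here — StreamingJobSku is renamed to Sku, compatibility_level gains "1.2", and content_storage_policy becomes writable. A minimal sketch with placeholder values:

    from azure.mgmt.streamanalytics.models import Sku, StreamingJob

    job = StreamingJob(
        location="West US",                      # placeholder region
        sku=Sku(name="Standard"),                # renamed from StreamingJobSku
        compatibility_level="1.2",               # newly accepted alongside "1.0"
        content_storage_policy="SystemAccount",  # no longer read-only
    )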
@@ -4698,6 +4299,7 @@ class Transformation(SubResource): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'streaming_units': {'key': 'properties.streamingUnits', 'type': 'int'}, + 'valid_streaming_units': {'key': 'properties.validStreamingUnits', 'type': '[int]'}, 'query': {'key': 'properties.query', 'type': 'str'}, 'etag': {'key': 'properties.etag', 'type': 'str'}, } @@ -4708,5 +4310,6 @@ def __init__( ): super(Transformation, self).__init__(**kwargs) self.streaming_units = kwargs.get('streaming_units', None) + self.valid_streaming_units = kwargs.get('valid_streaming_units', None) self.query = kwargs.get('query', None) self.etag = None diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py index aa07f713986e..b044702bed6b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py @@ -7,7 +7,7 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Union from azure.core.exceptions import HttpResponseError import msrest.serialization @@ -15,122 +15,17 @@ from ._stream_analytics_management_client_enums import * -class FunctionProperties(msrest.serialization.Model): - """The properties that are associated with a function. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AggregateFunctionProperties, ScalarFunctionProperties. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str - :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. - :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. 
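Editor's note: Transformation now round-trips valid_streaming_units alongside streaming_units. A short sketch with a placeholder SAQL query:

    from azure.mgmt.streamanalytics.models import Transformation

    transformation = Transformation(
        streaming_units=3,
        query="SELECT * INTO [output] FROM [input]",  # placeholder SAQL query
    )
    # valid_streaming_units is also settable and is reported back by the service.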
- :type binding: ~stream_analytics_management_client.models.FunctionBinding - """ - - _validation = { - 'type': {'required': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, - 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, - 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, - } - - _subtype_map = { - 'type': {'Aggregate': 'AggregateFunctionProperties', 'Scalar': 'ScalarFunctionProperties'} - } - - def __init__( - self, - *, - inputs: Optional[List["FunctionInput"]] = None, - output: Optional["FunctionOutput"] = None, - binding: Optional["FunctionBinding"] = None, - **kwargs - ): - super(FunctionProperties, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.etag = None - self.inputs = inputs - self.output = output - self.binding = binding - - -class AggregateFunctionProperties(FunctionProperties): - """The properties that are associated with an aggregate function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of function.Constant filled by server. - :type type: str - :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. - :vartype etag: str - :param inputs: - :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. - :type output: ~stream_analytics_management_client.models.FunctionOutput - :param binding: The physical binding of the function. For example, in the Azure Machine - Learning web service’s case, this describes the endpoint. - :type binding: ~stream_analytics_management_client.models.FunctionBinding - """ - - _validation = { - 'type': {'required': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[FunctionInput]'}, - 'output': {'key': 'properties.output', 'type': 'FunctionOutput'}, - 'binding': {'key': 'properties.binding', 'type': 'FunctionBinding'}, - } - - def __init__( - self, - *, - inputs: Optional[List["FunctionInput"]] = None, - output: Optional["FunctionOutput"] = None, - binding: Optional["FunctionBinding"] = None, - **kwargs - ): - super(AggregateFunctionProperties, self).__init__(inputs=inputs, output=output, binding=binding, **kwargs) - self.type = 'Aggregate' # type: str - - class Serialization(msrest.serialization.Model): """Describes how data from an input is serialized or how data is serialized when written to an output. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSerialization, CsvSerialization, CustomClrSerialization, JsonSerialization, ParquetSerialization. + sub-classes are: AvroSerialization, CsvSerialization, JsonSerialization, ParquetSerialization. All required parameters must be populated in order to send to Azure. :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. 
Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". :type type: str or ~stream_analytics_management_client.models.EventSerializationType """ @@ -143,7 +38,7 @@ class Serialization(msrest.serialization.Model): } _subtype_map = { - 'type': {'Avro': 'AvroSerialization', 'Csv': 'CsvSerialization', 'CustomClr': 'CustomClrSerialization', 'Json': 'JsonSerialization', 'Parquet': 'ParquetSerialization'} + 'type': {'Avro': 'AvroSerialization', 'Csv': 'CsvSerialization', 'Json': 'JsonSerialization', 'Parquet': 'ParquetSerialization'} } def __init__( @@ -161,11 +56,11 @@ class AvroSerialization(Serialization): :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param properties: The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests. - :type properties: object + :type properties: any """ _validation = { @@ -180,7 +75,7 @@ class AvroSerialization(Serialization): def __init__( self, *, - properties: Optional[object] = None, + properties: Optional[Any] = None, **kwargs ): super(AvroSerialization, self).__init__(**kwargs) @@ -192,7 +87,7 @@ class OutputDataSource(msrest.serialization.Model): """Describes the data source that output will be written to. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. + sub-classes are: AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, AzureTableOutputDataSource, PowerBIOutputDataSource. All required parameters must be populated in order to send to Azure. 
@@ -210,7 +105,7 @@ class OutputDataSource(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.AzureFunction': 'AzureFunctionOutputDataSource', 'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} + 'type': {'Microsoft.DataLake/Accounts': 'AzureDataLakeStoreOutputDataSource', 'Microsoft.EventHub/EventHub': 'EventHubV2OutputDataSource', 'Microsoft.ServiceBus/EventHub': 'EventHubOutputDataSource', 'Microsoft.ServiceBus/Queue': 'ServiceBusQueueOutputDataSource', 'Microsoft.ServiceBus/Topic': 'ServiceBusTopicOutputDataSource', 'Microsoft.Sql/Server/DataWarehouse': 'AzureSynapseOutputDataSource', 'Microsoft.Sql/Server/Database': 'AzureSqlDatabaseOutputDataSource', 'Microsoft.Storage/Blob': 'BlobOutputDataSource', 'Microsoft.Storage/DocumentDB': 'DocumentDbOutputDataSource', 'Microsoft.Storage/Table': 'AzureTableOutputDataSource', 'PowerBI': 'PowerBIOutputDataSource'} } def __init__( @@ -421,67 +316,11 @@ def __init__( self.authentication_mode = authentication_mode -class AzureFunctionOutputDataSource(OutputDataSource): - """Defines the metadata of AzureFunctionOutputDataSource. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of data source output will be written to. Required on - PUT (CreateOrReplace) requests.Constant filled by server. - :type type: str - :param function_app_name: The name of your Azure Functions app. - :type function_app_name: str - :param function_name: The name of the function in your Azure Functions app. - :type function_name: str - :param api_key: If you want to use an Azure Function from another subscription, you can do so - by providing the key to access your function. - :type api_key: str - :param max_batch_size: A property that lets you set the maximum size for each output batch - that's sent to your Azure function. The input unit is in bytes. By default, this value is - 262,144 bytes (256 KB). - :type max_batch_size: float - :param max_batch_count: A property that lets you specify the maximum number of events in each - batch that's sent to Azure Functions. The default value is 100. 
- :type max_batch_count: float - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'function_app_name': {'key': 'properties.functionAppName', 'type': 'str'}, - 'function_name': {'key': 'properties.functionName', 'type': 'str'}, - 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'max_batch_size': {'key': 'properties.maxBatchSize', 'type': 'float'}, - 'max_batch_count': {'key': 'properties.maxBatchCount', 'type': 'float'}, - } - - def __init__( - self, - *, - function_app_name: Optional[str] = None, - function_name: Optional[str] = None, - api_key: Optional[str] = None, - max_batch_size: Optional[float] = None, - max_batch_count: Optional[float] = None, - **kwargs - ): - super(AzureFunctionOutputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.AzureFunction' # type: str - self.function_app_name = function_app_name - self.function_name = function_name - self.api_key = api_key - self.max_batch_size = max_batch_size - self.max_batch_count = max_batch_count - - class FunctionBinding(msrest.serialization.Model): """The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMachineLearningStudioFunctionBinding, AzureMachineLearningServiceFunctionBinding, CSharpFunctionBinding, JavaScriptFunctionBinding. + sub-classes are: AzureMachineLearningWebServiceFunctionBinding, JavaScriptFunctionBinding. All required parameters must be populated in order to send to Azure. @@ -498,7 +337,7 @@ class FunctionBinding(msrest.serialization.Model): } _subtype_map = { - 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionBinding', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionBinding', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding'} + 'type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningWebServiceFunctionBinding', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionBinding'} } def __init__( @@ -509,7 +348,7 @@ def __init__( self.type = None # type: Optional[str] -class AzureMachineLearningServiceFunctionBinding(FunctionBinding): +class AzureMachineLearningWebServiceFunctionBinding(FunctionBinding): """The binding to an Azure Machine Learning web service. All required parameters must be populated in order to send to Azure. @@ -517,23 +356,20 @@ class AzureMachineLearningServiceFunctionBinding(FunctionBinding): :param type: Required. Indicates the function binding type.Constant filled by server. :type type: str :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning web - service. + service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :type endpoint: str :param api_key: The API key used to authenticate with Request-Response endpoint. :type api_key: str :param inputs: The inputs for the Azure Machine Learning web service endpoint. - :type inputs: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] + :type inputs: ~stream_analytics_management_client.models.AzureMachineLearningWebServiceInputs :param outputs: A list of outputs from the Azure Machine Learning web service endpoint execution. 
:type outputs: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceOutputColumn] + list[~stream_analytics_management_client.models.AzureMachineLearningWebServiceOutputColumn] :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000. :type batch_size: int - :param number_of_parallel_requests: The number of parallel requests that will be sent per - partition of your job to the machine learning service. Default is 1. - :type number_of_parallel_requests: int """ _validation = { @@ -544,10 +380,9 @@ class AzureMachineLearningServiceFunctionBinding(FunctionBinding): 'type': {'key': 'type', 'type': 'str'}, 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': '[AzureMachineLearningServiceInputColumn]'}, - 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningServiceOutputColumn]'}, + 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningWebServiceInputs'}, + 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningWebServiceOutputColumn]'}, 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, - 'number_of_parallel_requests': {'key': 'properties.numberOfParallelRequests', 'type': 'int'}, } def __init__( @@ -555,27 +390,25 @@ def __init__( *, endpoint: Optional[str] = None, api_key: Optional[str] = None, - inputs: Optional[List["AzureMachineLearningServiceInputColumn"]] = None, - outputs: Optional[List["AzureMachineLearningServiceOutputColumn"]] = None, + inputs: Optional["AzureMachineLearningWebServiceInputs"] = None, + outputs: Optional[List["AzureMachineLearningWebServiceOutputColumn"]] = None, batch_size: Optional[int] = None, - number_of_parallel_requests: Optional[int] = None, **kwargs ): - super(AzureMachineLearningServiceFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.MachineLearningServices' # type: str + super(AzureMachineLearningWebServiceFunctionBinding, self).__init__(**kwargs) + self.type = 'Microsoft.MachineLearning/WebService' # type: str self.endpoint = endpoint self.api_key = api_key self.inputs = inputs self.outputs = outputs self.batch_size = batch_size - self.number_of_parallel_requests = number_of_parallel_requests class FunctionRetrieveDefaultDefinitionParameters(msrest.serialization.Model): """Parameters used to specify the type of function to retrieve the default definition for. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, CSharpFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters. + sub-classes are: AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters. All required parameters must be populated in order to send to Azure. 
@@ -592,7 +425,7 @@ class FunctionRetrieveDefaultDefinitionParameters(msrest.serialization.Model): } _subtype_map = { - 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.MachineLearningServices': 'AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/CLRUdf': 'CSharpFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters'} + 'binding_type': {'Microsoft.MachineLearning/WebService': 'AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters', 'Microsoft.StreamAnalytics/JavascriptUdf': 'JavaScriptFunctionRetrieveDefaultDefinitionParameters'} } def __init__( @@ -603,25 +436,24 @@ def __init__( self.binding_type = None # type: Optional[str] -class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): +class AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. :type binding_type: str :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - web service. + web service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -630,25 +462,27 @@ class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters(Fun 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, *, execute_endpoint: Optional[str] = None, + udf_type: Optional[str] = None, **kwargs ): - super(AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.MachineLearningServices' # type: str + super(AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) + self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str self.execute_endpoint = execute_endpoint + self.udf_type = udf_type -class AzureMachineLearningServiceInputColumn(msrest.serialization.Model): +class AzureMachineLearningWebServiceInputColumn(msrest.serialization.Model): """Describes an input column for the Azure Machine Learning web service endpoint. :param name: The name of the input column. :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the input column. + :param data_type: The (Azure Machine Learning supported) data type of the input column. A list + of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . 
:type data_type: str :param map_to: The zero based index of the function parameter this input maps to. :type map_to: int @@ -668,13 +502,13 @@ def __init__( map_to: Optional[int] = None, **kwargs ): - super(AzureMachineLearningServiceInputColumn, self).__init__(**kwargs) + super(AzureMachineLearningWebServiceInputColumn, self).__init__(**kwargs) self.name = name self.data_type = data_type self.map_to = map_to -class AzureMachineLearningServiceInputs(msrest.serialization.Model): +class AzureMachineLearningWebServiceInputs(msrest.serialization.Model): """The inputs for the Azure Machine Learning web service endpoint. :param name: The name of the input. This is the name provided while authoring the endpoint. @@ -682,221 +516,34 @@ class AzureMachineLearningServiceInputs(msrest.serialization.Model): :param column_names: A list of input columns for the Azure Machine Learning web service endpoint. :type column_names: - list[~stream_analytics_management_client.models.AzureMachineLearningServiceInputColumn] + list[~stream_analytics_management_client.models.AzureMachineLearningWebServiceInputColumn] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, - 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningServiceInputColumn]'}, + 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningWebServiceInputColumn]'}, } def __init__( self, *, name: Optional[str] = None, - column_names: Optional[List["AzureMachineLearningServiceInputColumn"]] = None, + column_names: Optional[List["AzureMachineLearningWebServiceInputColumn"]] = None, **kwargs ): - super(AzureMachineLearningServiceInputs, self).__init__(**kwargs) + super(AzureMachineLearningWebServiceInputs, self).__init__(**kwargs) self.name = name self.column_names = column_names -class AzureMachineLearningServiceOutputColumn(msrest.serialization.Model): +class AzureMachineLearningWebServiceOutputColumn(msrest.serialization.Model): """Describes an output column for the Azure Machine Learning web service endpoint. - :param name: The name of the output column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the output column. - :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. - :type map_to: int - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'map_to': {'key': 'mapTo', 'type': 'int'}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - data_type: Optional[str] = None, - map_to: Optional[int] = None, - **kwargs - ): - super(AzureMachineLearningServiceOutputColumn, self).__init__(**kwargs) - self.name = name - self.data_type = data_type - self.map_to = map_to - - -class AzureMachineLearningStudioFunctionBinding(FunctionBinding): - """The binding to an Azure Machine Learning Studio. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning- - consume-web-services#request-response-service-rrs. - :type endpoint: str - :param api_key: The API key used to authenticate with Request-Response endpoint. - :type api_key: str - :param inputs: The inputs for the Azure Machine Learning Studio endpoint. 
- :type inputs: ~stream_analytics_management_client.models.AzureMachineLearningStudioInputs - :param outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution. - :type outputs: - list[~stream_analytics_management_client.models.AzureMachineLearningStudioOutputColumn] - :param batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure - ML RRS execute request. Default is 1000. - :type batch_size: int - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, - 'api_key': {'key': 'properties.apiKey', 'type': 'str'}, - 'inputs': {'key': 'properties.inputs', 'type': 'AzureMachineLearningStudioInputs'}, - 'outputs': {'key': 'properties.outputs', 'type': '[AzureMachineLearningStudioOutputColumn]'}, - 'batch_size': {'key': 'properties.batchSize', 'type': 'int'}, - } - - def __init__( - self, - *, - endpoint: Optional[str] = None, - api_key: Optional[str] = None, - inputs: Optional["AzureMachineLearningStudioInputs"] = None, - outputs: Optional[List["AzureMachineLearningStudioOutputColumn"]] = None, - batch_size: Optional[int] = None, - **kwargs - ): - super(AzureMachineLearningStudioFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.MachineLearning/WebService' # type: str - self.endpoint = endpoint - self.api_key = api_key - self.inputs = inputs - self.outputs = outputs - self.batch_size = batch_size - - -class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for an Azure Machine Learning Studio function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine- - learning-consume-web-services#request-response-service-rrs. - :type execute_endpoint: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str - """ - - _validation = { - 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, - } - - _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - 'execute_endpoint': {'key': 'bindingRetrievalProperties.executeEndpoint', 'type': 'str'}, - 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, - } - - udf_type = "Scalar" - - def __init__( - self, - *, - execute_endpoint: Optional[str] = None, - **kwargs - ): - super(AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.MachineLearning/WebService' # type: str - self.execute_endpoint = execute_endpoint - - -class AzureMachineLearningStudioInputColumn(msrest.serialization.Model): - """Describes an input column for the Azure Machine Learning Studio endpoint. - - :param name: The name of the input column. - :type name: str - :param data_type: The (Azure Machine Learning supported) data type of the input column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . 
- :type data_type: str - :param map_to: The zero based index of the function parameter this input maps to. - :type map_to: int - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'map_to': {'key': 'mapTo', 'type': 'int'}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - data_type: Optional[str] = None, - map_to: Optional[int] = None, - **kwargs - ): - super(AzureMachineLearningStudioInputColumn, self).__init__(**kwargs) - self.name = name - self.data_type = data_type - self.map_to = map_to - - -class AzureMachineLearningStudioInputs(msrest.serialization.Model): - """The inputs for the Azure Machine Learning Studio endpoint. - - :param name: The name of the input. This is the name provided while authoring the endpoint. - :type name: str - :param column_names: A list of input columns for the Azure Machine Learning Studio endpoint. - :type column_names: - list[~stream_analytics_management_client.models.AzureMachineLearningStudioInputColumn] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'column_names': {'key': 'columnNames', 'type': '[AzureMachineLearningStudioInputColumn]'}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - column_names: Optional[List["AzureMachineLearningStudioInputColumn"]] = None, - **kwargs - ): - super(AzureMachineLearningStudioInputs, self).__init__(**kwargs) - self.name = name - self.column_names = column_names - - -class AzureMachineLearningStudioOutputColumn(msrest.serialization.Model): - """Describes an output column for the Azure Machine Learning Studio endpoint. - :param name: The name of the output column. :type name: str :param data_type: The (Azure Machine Learning supported) data type of the output column. A list - of valid Azure Machine Learning data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn905923.aspx . + of valid Azure Machine Learning data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn905923.aspx . :type data_type: str """ @@ -912,7 +559,7 @@ def __init__( data_type: Optional[str] = None, **kwargs ): - super(AzureMachineLearningStudioOutputColumn, self).__init__(**kwargs) + super(AzureMachineLearningWebServiceOutputColumn, self).__init__(**kwargs) self.name = name self.data_type = data_type @@ -938,8 +585,8 @@ class AzureSqlDatabaseDataSourceProperties(msrest.serialization.Model): :param max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. :type max_batch_count: float - :param max_writer_count: Max Write r count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. + :param max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query + partition) are available. Optional on PUT requests. :type max_writer_count: float :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". @@ -1007,8 +654,8 @@ class AzureSqlDatabaseOutputDataSource(OutputDataSource): :param max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. :type max_batch_count: float - :param max_writer_count: Max Write r count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. 
+ :param max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query + partition) are available. Optional on PUT requests. :type max_writer_count: float :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". @@ -1077,8 +724,8 @@ class AzureSqlDatabaseOutputDataSourceProperties(AzureSqlDatabaseDataSourcePrope :param max_batch_count: Max Batch count for write to Sql database, the default value is 10,000. Optional on PUT requests. :type max_batch_count: float - :param max_writer_count: Max Write r count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. + :param max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query + partition) are available. Optional on PUT requests. :type max_writer_count: float :param authentication_mode: Authentication Mode. Possible values include: "Msi", "UserToken", "ConnectionString". @@ -1153,34 +800,6 @@ class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): :param type: Required. Indicates the type of input data source containing reference data. Required on PUT (CreateOrReplace) requests.Constant filled by server. :type type: str - :param properties: - :type properties: - ~stream_analytics_management_client.models.AzureSqlReferenceInputDataSourceProperties - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'AzureSqlReferenceInputDataSourceProperties'}, - } - - def __init__( - self, - *, - properties: Optional["AzureSqlReferenceInputDataSourceProperties"] = None, - **kwargs - ): - super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) - self.type = 'Microsoft.Sql/Server/Database' # type: str - self.properties = properties - - -class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model): - """AzureSqlReferenceInputDataSourceProperties. - :param server: This element is associated with the datasource element. This is the name of the server that contains the database that will be written to. :type server: str @@ -1196,10 +815,9 @@ class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model): :param table: This element is associated with the datasource element. The name of the table in the Azure SQL database.. :type table: str - :param refresh_type: This element is associated with the datasource element. This element is of - enum type. It indicates what kind of data refresh option do we want to - use:Static/RefreshPeriodicallyWithFull/RefreshPeriodicallyWithDelta. - :type refresh_type: str + :param refresh_type: Indicates the type of data refresh option. Possible values include: + "Static", "RefreshPeriodicallyWithFull", "RefreshPeriodicallyWithDelta". + :type refresh_type: str or ~stream_analytics_management_client.models.RefreshType :param refresh_rate: This element is associated with the datasource element. This indicates how frequently the data will be fetched from the database. It is of DateTime format. 
:type refresh_rate: str @@ -1212,16 +830,21 @@ class AzureSqlReferenceInputDataSourceProperties(msrest.serialization.Model): :type delta_snapshot_query: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'server': {'key': 'server', 'type': 'str'}, - 'database': {'key': 'database', 'type': 'str'}, - 'user': {'key': 'user', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'table': {'key': 'table', 'type': 'str'}, - 'refresh_type': {'key': 'refreshType', 'type': 'str'}, - 'refresh_rate': {'key': 'refreshRate', 'type': 'str'}, - 'full_snapshot_query': {'key': 'fullSnapshotQuery', 'type': 'str'}, - 'delta_snapshot_query': {'key': 'deltaSnapshotQuery', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'properties.server', 'type': 'str'}, + 'database': {'key': 'properties.database', 'type': 'str'}, + 'user': {'key': 'properties.user', 'type': 'str'}, + 'password': {'key': 'properties.password', 'type': 'str'}, + 'table': {'key': 'properties.table', 'type': 'str'}, + 'refresh_type': {'key': 'properties.refreshType', 'type': 'str'}, + 'refresh_rate': {'key': 'properties.refreshRate', 'type': 'str'}, + 'full_snapshot_query': {'key': 'properties.fullSnapshotQuery', 'type': 'str'}, + 'delta_snapshot_query': {'key': 'properties.deltaSnapshotQuery', 'type': 'str'}, } def __init__( @@ -1232,13 +855,14 @@ def __init__( user: Optional[str] = None, password: Optional[str] = None, table: Optional[str] = None, - refresh_type: Optional[str] = None, + refresh_type: Optional[Union[str, "RefreshType"]] = None, refresh_rate: Optional[str] = None, full_snapshot_query: Optional[str] = None, delta_snapshot_query: Optional[str] = None, **kwargs ): - super(AzureSqlReferenceInputDataSourceProperties, self).__init__(**kwargs) + super(AzureSqlReferenceInputDataSource, self).__init__(**kwargs) + self.type = 'Microsoft.Sql/Server/Database' # type: str self.server = server self.database = database self.user = user @@ -1475,10 +1099,10 @@ class BlobDataSourceProperties(msrest.serialization.Model): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1531,10 +1155,10 @@ class BlobOutputDataSource(OutputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. 
See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1594,10 +1218,10 @@ class BlobOutputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1651,10 +1275,10 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1708,10 +1332,10 @@ class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1792,10 +1416,10 @@ class BlobStreamInputDataSource(StreamInputDataSource): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. 
It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1855,10 +1479,10 @@ class BlobStreamInputDataSourceProperties(BlobDataSourceProperties): :type container: str :param path_pattern: The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-input or https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for a more detailed explanation and - example. + input or output to the job. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more + detailed explanation and example. :type path_pattern: str :param date_format: The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead. @@ -1896,7 +1520,7 @@ def __init__( class Resource(msrest.serialization.Model): - """Resource. + """The base resource definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -2244,43 +1868,9 @@ class Compression(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - *, - type: str, - **kwargs - ): - super(Compression, self).__init__(**kwargs) - self.type = type - - -class CSharpFunctionBinding(FunctionBinding): - """The binding to a CSharp function. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the function binding type.Constant filled by server. - :type type: str - :param script: The Csharp code containing a single function definition. - :type script: str - :param dll_path: The Csharp code containing a single function definition. - :type dll_path: str - :param class_property: The Csharp code containing a single function definition. - :type class_property: str - :param method: The Csharp code containing a single function definition. - :type method: str + :param type: Required. Indicates the type of compression that the input uses. Required on PUT + (CreateOrReplace) requests. Possible values include: "None", "GZip", "Deflate". 
+ :type type: str or ~stream_analytics_management_client.models.CompressionType """ _validation = { @@ -2289,124 +1879,37 @@ class CSharpFunctionBinding(FunctionBinding): _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, - 'script': {'key': 'properties.script', 'type': 'str'}, - 'dll_path': {'key': 'properties.dllPath', 'type': 'str'}, - 'class_property': {'key': 'properties.class', 'type': 'str'}, - 'method': {'key': 'properties.method', 'type': 'str'}, } def __init__( self, *, - script: Optional[str] = None, - dll_path: Optional[str] = None, - class_property: Optional[str] = None, - method: Optional[str] = None, - **kwargs - ): - super(CSharpFunctionBinding, self).__init__(**kwargs) - self.type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str - self.script = script - self.dll_path = dll_path - self.class_property = class_property - self.method = method - - -class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for a CSharp function. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param binding_type: Required. Indicates the function binding type.Constant filled by server. - :type binding_type: str - :param script: The CSharp code containing a single function definition. - :type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str - """ - - _validation = { - 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, - } - - _attribute_map = { - 'binding_type': {'key': 'bindingType', 'type': 'str'}, - 'script': {'key': 'bindingRetrievalProperties.script', 'type': 'str'}, - 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, - } - - udf_type = "Scalar" - - def __init__( - self, - *, - script: Optional[str] = None, - **kwargs - ): - super(CSharpFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) - self.binding_type = 'Microsoft.StreamAnalytics/CLRUdf' # type: str - self.script = script - - -class CsvSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in CSV format. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Indicates the type of serialization that the input or output uses. - Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". - :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param field_delimiter: Specifies the delimiter that will be used to separate comma-separated - value (CSV) records. See https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream- - analytics-input or https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics- - output for a list of supported values. Required on PUT (CreateOrReplace) requests. - :type field_delimiter: str - :param encoding: Specifies the encoding of the incoming data in the case of input and the - encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. - Possible values include: "UTF8". 
- :type encoding: str or ~stream_analytics_management_client.models.Encoding - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'field_delimiter': {'key': 'properties.fieldDelimiter', 'type': 'str'}, - 'encoding': {'key': 'properties.encoding', 'type': 'str'}, - } - - def __init__( - self, - *, - field_delimiter: Optional[str] = None, - encoding: Optional[Union[str, "Encoding"]] = None, - **kwargs - ): - super(CsvSerialization, self).__init__(**kwargs) - self.type = 'Csv' # type: str - self.field_delimiter = field_delimiter - self.encoding = encoding + type: Union[str, "CompressionType"], + **kwargs + ): + super(Compression, self).__init__(**kwargs) + self.type = type -class CustomClrSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in custom format. +class CsvSerialization(Serialization): + """Describes how data from an input is serialized or how data is serialized when written to an output in CSV format. All required parameters must be populated in order to send to Azure. :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". :type type: str or ~stream_analytics_management_client.models.EventSerializationType - :param serialization_dll_path: The serialization library path. - :type serialization_dll_path: str - :param serialization_class_name: The serialization class name. - :type serialization_class_name: str + :param field_delimiter: Specifies the delimiter that will be used to separate comma-separated + value (CSV) records. See + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a list of + supported values. Required on PUT (CreateOrReplace) requests. + :type field_delimiter: str + :param encoding: Specifies the encoding of the incoming data in the case of input and the + encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. + Possible values include: "UTF8". 
+ :type encoding: str or ~stream_analytics_management_client.models.Encoding """ _validation = { @@ -2415,21 +1918,21 @@ class CustomClrSerialization(Serialization): _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, - 'serialization_dll_path': {'key': 'properties.serializationDllPath', 'type': 'str'}, - 'serialization_class_name': {'key': 'properties.serializationClassName', 'type': 'str'}, + 'field_delimiter': {'key': 'properties.fieldDelimiter', 'type': 'str'}, + 'encoding': {'key': 'properties.encoding', 'type': 'str'}, } def __init__( self, *, - serialization_dll_path: Optional[str] = None, - serialization_class_name: Optional[str] = None, + field_delimiter: Optional[str] = None, + encoding: Optional[Union[str, "Encoding"]] = None, **kwargs ): - super(CustomClrSerialization, self).__init__(**kwargs) - self.type = 'CustomClr' # type: str - self.serialization_dll_path = serialization_dll_path - self.serialization_class_name = serialization_class_name + super(CsvSerialization, self).__init__(**kwargs) + self.type = 'Csv' # type: str + self.field_delimiter = field_delimiter + self.encoding = encoding class DiagnosticCondition(msrest.serialization.Model): @@ -2514,9 +2017,9 @@ class DocumentDbOutputDataSource(OutputDataSource): :type database: str :param collection_name_pattern: The collection name pattern for the collections to be used. The collection name format can be constructed using the optional {partition} token, where - partitions start from 0. See the DocumentDB section of https://docs.microsoft.com/en- - us/rest/api/streamanalytics/stream-analytics-output for more information. Required on PUT - (CreateOrReplace) requests. + partitions start from 0. See the DocumentDB section of + https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for more + information. Required on PUT (CreateOrReplace) requests. :type collection_name_pattern: str :param partition_key: The name of the field in output events used to specify the key for partitioning output across collections. If 'collectionNamePattern' contains the {partition} @@ -2566,88 +2069,88 @@ class Error(msrest.serialization.Model): """Common error representation. :param error: Error definition properties. - :type error: ~stream_analytics_management_client.models.ErrorAutoGenerated + :type error: ~stream_analytics_management_client.models.ErrorError """ _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorAutoGenerated'}, + 'error': {'key': 'error', 'type': 'ErrorError'}, } def __init__( self, *, - error: Optional["ErrorAutoGenerated"] = None, + error: Optional["ErrorError"] = None, **kwargs ): super(Error, self).__init__(**kwargs) self.error = error -class ErrorAutoGenerated(msrest.serialization.Model): - """Error definition properties. +class ErrorDetails(msrest.serialization.Model): + """Common error details representation. :param code: Error code. :type code: str - :param message: Error message. - :type message: str :param target: Error target. :type target: str - :param details: Error details. - :type details: list[~stream_analytics_management_client.models.ErrorDetails] + :param message: Error message. 
+ :type message: str """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetails]'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, *, code: Optional[str] = None, - message: Optional[str] = None, target: Optional[str] = None, - details: Optional[List["ErrorDetails"]] = None, + message: Optional[str] = None, **kwargs ): - super(ErrorAutoGenerated, self).__init__(**kwargs) + super(ErrorDetails, self).__init__(**kwargs) self.code = code - self.message = message self.target = target - self.details = details + self.message = message -class ErrorDetails(msrest.serialization.Model): - """Common error details representation. +class ErrorError(msrest.serialization.Model): + """Error definition properties. :param code: Error code. :type code: str - :param target: Error target. - :type target: str :param message: Error message. :type message: str + :param target: Error target. + :type target: str + :param details: Error details. + :type details: list[~stream_analytics_management_client.models.ErrorDetails] """ _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetails]'}, } def __init__( self, *, code: Optional[str] = None, - target: Optional[str] = None, message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["ErrorDetails"]] = None, **kwargs ): - super(ErrorDetails, self).__init__(**kwargs) + super(ErrorError, self).__init__(**kwargs) self.code = code - self.target = target self.message = message + self.target = target + self.details = details class ErrorResponse(msrest.serialization.Model): @@ -2786,7 +2289,7 @@ class EventHubOutputDataSource(OutputDataSource): :param partition_key: The key/column that is used to determine to which partition to send event data. :type partition_key: str - :param property_columns: + :param property_columns: The properties associated with this Event Hub output. :type property_columns: list[str] """ @@ -2848,7 +2351,7 @@ class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): :param partition_key: The key/column that is used to determine to which partition to send event data. :type partition_key: str - :param property_columns: + :param property_columns: The properties associated with this Event Hub output. :type property_columns: list[str] """ @@ -3016,7 +2519,7 @@ class EventHubV2OutputDataSource(OutputDataSource): :param partition_key: The key/column that is used to determine to which partition to send event data. :type partition_key: str - :param property_columns: + :param property_columns: The properties associated with this Event Hub output. :type property_columns: list[str] """ @@ -3122,37 +2625,6 @@ def __init__( self.consumer_group_name = consumer_group_name -class External(msrest.serialization.Model): - """The storage account where the custom code artifacts are located. - - :param storage_account: The properties that are associated with an Azure Storage account. 
- :type storage_account: ~stream_analytics_management_client.models.StorageAccount - :param container: - :type container: str - :param path: - :type path: str - """ - - _attribute_map = { - 'storage_account': {'key': 'storageAccount', 'type': 'StorageAccount'}, - 'container': {'key': 'container', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__( - self, - *, - storage_account: Optional["StorageAccount"] = None, - container: Optional[str] = None, - path: Optional[str] = None, - **kwargs - ): - super(External, self).__init__(**kwargs) - self.storage_account = storage_account - self.container = container - self.path = path - - class SubResource(msrest.serialization.Model): """The base sub-resource model definition. @@ -3290,8 +2762,8 @@ class FunctionOutput(msrest.serialization.Model): """Describes the output of a function. :param data_type: The (Azure Stream Analytics supported) data type of the function output. A - list of valid Azure Stream Analytics data types are described at https://msdn.microsoft.com/en- - us/library/azure/dn835065.aspx. + list of valid Azure Stream Analytics data types are described at + https://msdn.microsoft.com/en-us/library/azure/dn835065.aspx. :type data_type: str """ @@ -3309,14 +2781,55 @@ def __init__( self.data_type = data_type +class FunctionProperties(msrest.serialization.Model): + """The properties that are associated with a function. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ScalarFunctionProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Indicates the type of function.Constant filled by server. + :type type: str + :ivar etag: The current entity tag for the function. This is an opaque string. You can use it + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. + :vartype etag: str + """ + + _validation = { + 'type': {'required': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Scalar': 'ScalarFunctionProperties'} + } + + def __init__( + self, + **kwargs + ): + super(FunctionProperties, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.etag = None + + class Identity(msrest.serialization.Model): """Describes how identity is verified. - :param tenant_id: + :param tenant_id: The identity tenantId. :type tenant_id: str - :param principal_id: + :param principal_id: The identity principal ID. :type principal_id: str - :param type: + :param type: The identity type. :type type: str """ @@ -3568,8 +3081,6 @@ def __init__( class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): """The parameters needed to retrieve the default function definition for a JavaScript function. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param binding_type: Required. Indicates the function binding type.Constant filled by server. @@ -3577,13 +3088,13 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa :param script: The JavaScript code containing a single function definition. 
For example: 'function (x, y) { return x + y; }'. :type script: str - :ivar udf_type: The function type. Default value: "Scalar". - :vartype udf_type: str + :param udf_type: The function type. The only acceptable values to pass in are None and + "Scalar". The default value is None. + :type udf_type: str """ _validation = { 'binding_type': {'required': True}, - 'udf_type': {'constant': True}, } _attribute_map = { @@ -3592,17 +3103,17 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa 'udf_type': {'key': 'bindingRetrievalProperties.udfType', 'type': 'str'}, } - udf_type = "Scalar" - def __init__( self, *, script: Optional[str] = None, + udf_type: Optional[str] = None, **kwargs ): super(JavaScriptFunctionRetrieveDefaultDefinitionParameters, self).__init__(**kwargs) self.binding_type = 'Microsoft.StreamAnalytics/JavascriptUdf' # type: str self.script = script + self.udf_type = udf_type class StorageAccount(msrest.serialization.Model): @@ -3672,7 +3183,7 @@ class JsonSerialization(Serialization): :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". :type type: str or ~stream_analytics_management_client.models.EventSerializationType :param encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. @@ -3717,6 +3228,8 @@ class Operation(msrest.serialization.Model): :ivar name: The name of the operation being performed on this particular object. :vartype name: str + :param is_data_action: Indicates whether the operation is a data action. + :type is_data_action: bool :ivar display: Contains the localized display information for this particular operation / action. :vartype display: ~stream_analytics_management_client.models.OperationDisplay @@ -3729,15 +3242,19 @@ class Operation(msrest.serialization.Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, } def __init__( self, + *, + is_data_action: Optional[bool] = None, **kwargs ): super(Operation, self).__init__(**kwargs) self.name = None + self.is_data_action = is_data_action self.display = None @@ -3827,9 +3344,9 @@ class Output(SubResource): :param datasource: Describes the data source that output will be written to. Required on PUT (CreateOrReplace) requests. :type datasource: ~stream_analytics_management_client.models.OutputDataSource - :param time_window: + :param time_window: The time frame for filtering Stream Analytics job outputs. :type time_window: str - :param size_window: + :param size_window: The size window to constrain a Stream Analytics output to. :type size_window: float :param serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. @@ -3918,11 +3435,11 @@ class ParquetSerialization(Serialization): :param type: Required. Indicates the type of serialization that the input or output uses. Required on PUT (CreateOrReplace) requests.Constant filled by server. Possible values include: - "Csv", "Avro", "Json", "CustomClr", "Parquet". + "Csv", "Avro", "Json", "Parquet". 
:type type: str or ~stream_analytics_management_client.models.EventSerializationType :param properties: The properties that are associated with the Parquet serialization type. Required on PUT (CreateOrReplace) requests. - :type properties: object + :type properties: any """ _validation = { @@ -3937,7 +3454,7 @@ class ParquetSerialization(Serialization): def __init__( self, *, - properties: Optional[object] = None, + properties: Optional[Any] = None, **kwargs ): super(ParquetSerialization, self).__init__(**kwargs) @@ -4089,7 +3606,41 @@ def __init__( self.authentication_mode = authentication_mode -class PrivateEndpoint(Resource): +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class PrivateEndpoint(ProxyResource): """Complete information about the private endpoint. Variables are only populated by the server, and will be ignored when sending a request. @@ -4239,21 +3790,27 @@ def __init__( class PrivateLinkServiceConnection(msrest.serialization.Model): """A grouping of information about the connection to the remote resource. + Variables are only populated by the server, and will be ignored when sending a request. + :param private_link_service_id: The resource id of the private link service. Required on PUT (CreateOrUpdate) requests. :type private_link_service_id: str :param group_ids: The ID(s) of the group(s) obtained from the remote resource that this private endpoint should connect to. Required on PUT (CreateOrUpdate) requests. :type group_ids: list[str] - :param request_message: A message passed to the owner of the remote resource with this + :ivar request_message: A message passed to the owner of the remote resource with this connection request. Restricted to 140 chars. - :type request_message: str + :vartype request_message: str :param private_link_service_connection_state: A collection of read-only information about the state of the connection to the private remote resource. 
:type private_link_service_connection_state: ~stream_analytics_management_client.models.PrivateLinkConnectionState """ + _validation = { + 'request_message': {'readonly': True}, + } + _attribute_map = { 'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'}, 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'}, @@ -4266,51 +3823,16 @@ def __init__( *, private_link_service_id: Optional[str] = None, group_ids: Optional[List[str]] = None, - request_message: Optional[str] = None, private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None, **kwargs ): super(PrivateLinkServiceConnection, self).__init__(**kwargs) self.private_link_service_id = private_link_service_id self.group_ids = group_ids - self.request_message = request_message + self.request_message = None self.private_link_service_connection_state = private_link_service_connection_state -class ProxyResource(Resource): - """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ProxyResource, self).__init__(**kwargs) - - class ReferenceInputProperties(InputProperties): """The properties that are associated with an input containing reference data. @@ -4411,12 +3933,12 @@ class ScalarFunctionProperties(FunctionProperties): :param type: Required. Indicates the type of function.Constant filled by server. :type type: str :ivar etag: The current entity tag for the function. This is an opaque string. You can use it - to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :param inputs: + :param inputs: A list of inputs describing the parameters of the function. :type inputs: list[~stream_analytics_management_client.models.FunctionInput] - :param output: Describes the output of a function. + :param output: The output of the function. :type output: ~stream_analytics_management_client.models.FunctionOutput :param binding: The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint. 
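For illustration, a minimal sketch of a scalar function assembled from these models (the JavaScript body and variable name are hypothetical; JavaScriptFunctionBinding is one of the two remaining FunctionBinding subtypes):

from azure.mgmt.streamanalytics.models import (
    Function,
    FunctionInput,
    FunctionOutput,
    JavaScriptFunctionBinding,
    ScalarFunctionProperties,
)

# A scalar UDF with one bigint input and a bigint output, bound to JavaScript.
square = Function(
    properties=ScalarFunctionProperties(
        inputs=[FunctionInput(data_type="bigint")],
        output=FunctionOutput(data_type="bigint"),
        binding=JavaScriptFunctionBinding(script="function (x) { return x * x; }"),
    )
)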
@@ -4444,8 +3966,33 @@ def __init__( binding: Optional["FunctionBinding"] = None, **kwargs ): - super(ScalarFunctionProperties, self).__init__(inputs=inputs, output=output, binding=binding, **kwargs) + super(ScalarFunctionProperties, self).__init__(**kwargs) self.type = 'Scalar' # type: str + self.inputs = inputs + self.output = output + self.binding = binding + + +class ScaleStreamingJobParameters(msrest.serialization.Model): + """Parameters supplied to the Scale Streaming Job operation. + + :param streaming_units: Specifies the number of streaming units that the streaming job will + scale to. + :type streaming_units: int + """ + + _attribute_map = { + 'streaming_units': {'key': 'streamingUnits', 'type': 'int'}, + } + + def __init__( + self, + *, + streaming_units: Optional[int] = None, + **kwargs + ): + super(ScaleStreamingJobParameters, self).__init__(**kwargs) + self.streaming_units = streaming_units class ServiceBusQueueOutputDataSource(OutputDataSource): @@ -4474,8 +4021,11 @@ class ServiceBusQueueOutputDataSource(OutputDataSource): :param property_columns: A string array of the names of output columns to be attached to Service Bus messages as custom properties. :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. - :type system_property_columns: dict[str, str] + :param system_property_columns: The system properties associated with the Service Bus Queue. + The following system properties are supported: ReplyToSessionId, ContentType, To, Subject, + CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, + Label, ScheduledEnqueueTimeUtc. + :type system_property_columns: any """ _validation = { @@ -4490,7 +4040,7 @@ class ServiceBusQueueOutputDataSource(OutputDataSource): 'authentication_mode': {'key': 'properties.authenticationMode', 'type': 'str'}, 'queue_name': {'key': 'properties.queueName', 'type': 'str'}, 'property_columns': {'key': 'properties.propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': '{str}'}, + 'system_property_columns': {'key': 'properties.systemPropertyColumns', 'type': 'object'}, } def __init__( @@ -4502,7 +4052,7 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, queue_name: Optional[str] = None, property_columns: Optional[List[str]] = None, - system_property_columns: Optional[Dict[str, str]] = None, + system_property_columns: Optional[Any] = None, **kwargs ): super(ServiceBusQueueOutputDataSource, self).__init__(**kwargs) @@ -4537,8 +4087,11 @@ class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): :param property_columns: A string array of the names of output columns to be attached to Service Bus messages as custom properties. :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. - :type system_property_columns: dict[str, str] + :param system_property_columns: The system properties associated with the Service Bus Queue. + The following system properties are supported: ReplyToSessionId, ContentType, To, Subject, + CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, ReplyTo, + Label, ScheduledEnqueueTimeUtc. 
+ :type system_property_columns: any """ _attribute_map = { @@ -4548,7 +4101,7 @@ class ServiceBusQueueOutputDataSourceProperties(ServiceBusDataSourceProperties): 'authentication_mode': {'key': 'authenticationMode', 'type': 'str'}, 'queue_name': {'key': 'queueName', 'type': 'str'}, 'property_columns': {'key': 'propertyColumns', 'type': '[str]'}, - 'system_property_columns': {'key': 'systemPropertyColumns', 'type': '{str}'}, + 'system_property_columns': {'key': 'systemPropertyColumns', 'type': 'object'}, } def __init__( @@ -4560,7 +4113,7 @@ def __init__( authentication_mode: Optional[Union[str, "AuthenticationMode"]] = None, queue_name: Optional[str] = None, property_columns: Optional[List[str]] = None, - system_property_columns: Optional[Dict[str, str]] = None, + system_property_columns: Optional[Any] = None, **kwargs ): super(ServiceBusQueueOutputDataSourceProperties, self).__init__(service_bus_namespace=service_bus_namespace, shared_access_policy_name=shared_access_policy_name, shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, **kwargs) @@ -4595,7 +4148,10 @@ class ServiceBusTopicOutputDataSource(OutputDataSource): :param property_columns: A string array of the names of output columns to be attached to Service Bus messages as custom properties. :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. + :param system_property_columns: The system properties associated with the Service Bus Topic + Output. The following system properties are supported: ReplyToSessionId, ContentType, To, + Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, + ReplyTo, Label, ScheduledEnqueueTimeUtc. :type system_property_columns: dict[str, str] """ @@ -4658,7 +4214,10 @@ class ServiceBusTopicOutputDataSourceProperties(ServiceBusDataSourceProperties): :param property_columns: A string array of the names of output columns to be attached to Service Bus messages as custom properties. :type property_columns: list[str] - :param system_property_columns: Dictionary of :code:``. + :param system_property_columns: The system properties associated with the Service Bus Topic + Output. The following system properties are supported: ReplyToSessionId, ContentType, To, + Subject, CorrelationId, TimeToLive, PartitionKey, SessionId, ScheduledEnqueueTime, MessageId, + ReplyTo, Label, ScheduledEnqueueTimeUtc. :type system_property_columns: dict[str, str] """ @@ -4690,6 +4249,28 @@ def __init__( self.system_property_columns = system_property_columns +class Sku(msrest.serialization.Model): + """The properties that are associated with a SKU. + + :param name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values + include: "Standard". + :type name: str or ~stream_analytics_management_client.models.SkuName + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[Union[str, "SkuName"]] = None, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.name = name + + class StartStreamingJobParameters(msrest.serialization.Model): """Parameters supplied to the Start Streaming Job operation. @@ -4744,7 +4325,7 @@ class StreamingJob(TrackedResource): be used to authenticate with inputs and outputs. :type identity: ~stream_analytics_management_client.models.Identity :param sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. 
- :type sku: ~stream_analytics_management_client.models.StreamingJobSku + :type sku: ~stream_analytics_management_client.models.Sku :ivar job_id: A GUID uniquely identifying the streaming job. This GUID is generated upon creation of the streaming job. :vartype job_id: str @@ -4789,12 +4370,12 @@ class StreamingJob(TrackedResource): have a value of -1. :type events_late_arrival_max_delay_in_seconds: int :param data_locale: The data locale of the stream analytics job. Value should be the name of a - supported .NET Culture from the set https://msdn.microsoft.com/en- - us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none - specified. + supported .NET Culture from the set + https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. + Defaults to 'en-US' if none specified. :type data_locale: str :param compatibility_level: Controls certain runtime behaviors of the streaming job. Possible - values include: "1.0". + values include: "1.0", "1.2". :type compatibility_level: str or ~stream_analytics_management_client.models.CompatibilityLevel :ivar created_date: Value is an ISO-8601 formatted UTC timestamp indicating when the streaming job was created. @@ -4818,19 +4399,17 @@ class StreamingJob(TrackedResource): transformation. :type functions: list[~stream_analytics_management_client.models.Function] :ivar etag: The current entity tag for the streaming job. This is an opaque string. You can use - it to detect whether the resource has changed between requests. You can also use it in the If- - Match or If-None-Match headers for write operations for optimistic concurrency. + it to detect whether the resource has changed between requests. You can also use it in the + If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str :param job_storage_account: The properties that are associated with an Azure Storage account with MSI. :type job_storage_account: ~stream_analytics_management_client.models.JobStorageAccount - :ivar content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to + :param content_storage_policy: Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this requires the user to also specify jobStorageAccount property. . Possible values include: "SystemAccount", "JobStorageAccount". - :vartype content_storage_policy: str or + :type content_storage_policy: str or ~stream_analytics_management_client.models.ContentStoragePolicy - :param externals: The storage account where the custom code artifacts are located. - :type externals: ~stream_analytics_management_client.models.External :param cluster: The cluster which streaming jobs will run on. 
:type cluster: ~stream_analytics_management_client.models.ClusterInfo """ @@ -4845,7 +4424,6 @@ class StreamingJob(TrackedResource): 'last_output_event_time': {'readonly': True}, 'created_date': {'readonly': True}, 'etag': {'readonly': True}, - 'content_storage_policy': {'readonly': True}, } _attribute_map = { @@ -4855,7 +4433,7 @@ class StreamingJob(TrackedResource): 'tags': {'key': 'tags', 'type': '{str}'}, 'location': {'key': 'location', 'type': 'str'}, 'identity': {'key': 'identity', 'type': 'Identity'}, - 'sku': {'key': 'properties.sku', 'type': 'StreamingJobSku'}, + 'sku': {'key': 'properties.sku', 'type': 'Sku'}, 'job_id': {'key': 'properties.jobId', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'job_state': {'key': 'properties.jobState', 'type': 'str'}, @@ -4877,7 +4455,6 @@ class StreamingJob(TrackedResource): 'etag': {'key': 'properties.etag', 'type': 'str'}, 'job_storage_account': {'key': 'properties.jobStorageAccount', 'type': 'JobStorageAccount'}, 'content_storage_policy': {'key': 'properties.contentStoragePolicy', 'type': 'str'}, - 'externals': {'key': 'properties.externals', 'type': 'External'}, 'cluster': {'key': 'properties.cluster', 'type': 'ClusterInfo'}, } @@ -4887,7 +4464,7 @@ def __init__( tags: Optional[Dict[str, str]] = None, location: Optional[str] = None, identity: Optional["Identity"] = None, - sku: Optional["StreamingJobSku"] = None, + sku: Optional["Sku"] = None, job_type: Optional[Union[str, "JobType"]] = None, output_start_mode: Optional[Union[str, "OutputStartMode"]] = None, output_start_time: Optional[datetime.datetime] = None, @@ -4902,7 +4479,7 @@ def __init__( outputs: Optional[List["Output"]] = None, functions: Optional[List["Function"]] = None, job_storage_account: Optional["JobStorageAccount"] = None, - externals: Optional["External"] = None, + content_storage_policy: Optional[Union[str, "ContentStoragePolicy"]] = None, cluster: Optional["ClusterInfo"] = None, **kwargs ): @@ -4929,8 +4506,7 @@ def __init__( self.functions = functions self.etag = None self.job_storage_account = job_storage_account - self.content_storage_policy = None - self.externals = externals + self.content_storage_policy = content_storage_policy self.cluster = cluster @@ -4964,28 +4540,6 @@ def __init__( self.next_link = None -class StreamingJobSku(msrest.serialization.Model): - """The properties that are associated with a SKU. - - :param name: The name of the SKU. Required on PUT (CreateOrReplace) requests. Possible values - include: "Standard". - :type name: str or ~stream_analytics_management_client.models.StreamingJobSkuName - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - *, - name: Optional[Union[str, "StreamingJobSkuName"]] = None, - **kwargs - ): - super(StreamingJobSku, self).__init__(**kwargs) - self.name = name - - class StreamInputProperties(InputProperties): """The properties that are associated with an input containing stream data. @@ -5127,6 +4681,8 @@ class Transformation(SubResource): :vartype type: str :param streaming_units: Specifies the number of streaming units that the streaming job uses. :type streaming_units: int + :param valid_streaming_units: Specifies the valid streaming units a streaming job can scale to. + :type valid_streaming_units: list[int] :param query: Specifies the query that will be run in the streaming job. You can learn more about the Stream Analytics Query Language (SAQL) here: https://msdn.microsoft.com/library/azure/dn834998 . 
Required on PUT (CreateOrReplace) requests. @@ -5148,6 +4704,7 @@ class Transformation(SubResource): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'streaming_units': {'key': 'properties.streamingUnits', 'type': 'int'}, + 'valid_streaming_units': {'key': 'properties.validStreamingUnits', 'type': '[int]'}, 'query': {'key': 'properties.query', 'type': 'str'}, 'etag': {'key': 'properties.etag', 'type': 'str'}, } @@ -5157,10 +4714,12 @@ def __init__( *, name: Optional[str] = None, streaming_units: Optional[int] = None, + valid_streaming_units: Optional[List[int]] = None, query: Optional[str] = None, **kwargs ): super(Transformation, self).__init__(name=name, **kwargs) self.streaming_units = streaming_units + self.valid_streaming_units = valid_streaming_units self.query = query self.etag = None diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py index 4ddfdea9c290..ed875d3c114d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py @@ -39,22 +39,37 @@ class ClusterProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu Canceled """ - SUCCEEDED = "Succeeded" #: The cluster provisioning succeeded. - FAILED = "Failed" #: The cluster provisioning failed. - CANCELED = "Canceled" #: The cluster provisioning was canceled. - IN_PROGRESS = "InProgress" #: The cluster provisioning was inprogress. + #: The cluster provisioning succeeded. + SUCCEEDED = "Succeeded" + #: The cluster provisioning failed. + FAILED = "Failed" + #: The cluster provisioning was canceled. + CANCELED = "Canceled" + #: The cluster provisioning was inprogress. + IN_PROGRESS = "InProgress" class ClusterSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. """ - DEFAULT = "Default" #: The default SKU. + #: The default SKU. + DEFAULT = "Default" class CompatibilityLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Controls certain runtime behaviors of the streaming job. """ ONE0 = "1.0" + ONE2 = "1.2" + +class CompressionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Indicates the type of compression that the input uses. Required on PUT (CreateOrReplace) + requests. + """ + + NONE = "None" + G_ZIP = "GZip" + DEFLATE = "Deflate" class ContentStoragePolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this @@ -79,7 +94,6 @@ class EventSerializationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) CSV = "Csv" AVRO = "Avro" JSON = "Json" - CUSTOM_CLR = "CustomClr" PARQUET = "Parquet" class EventsOutOfOrderPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): @@ -93,16 +107,26 @@ class JobState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The current execution state of the streaming job. """ - CREATED = "Created" #: The job is currently in the Created state. - STARTING = "Starting" #: The job is currently in the Starting state. - RUNNING = "Running" #: The job is currently in the Running state. 
- STOPPING = "Stopping" #: The job is currently in the Stopping state. - STOPPED = "Stopped" #: The job is currently in the Stopped state. - DELETING = "Deleting" #: The job is currently in the Deleting state. - FAILED = "Failed" #: The job is currently in the Failed state. - DEGRADED = "Degraded" #: The job is currently in the Degraded state. - RESTARTING = "Restarting" #: The job is currently in the Restarting state. - SCALING = "Scaling" #: The job is currently in the Scaling state. + #: The job is currently in the Created state. + CREATED = "Created" + #: The job is currently in the Starting state. + STARTING = "Starting" + #: The job is currently in the Running state. + RUNNING = "Running" + #: The job is currently in the Stopping state. + STOPPING = "Stopping" + #: The job is currently in the Stopped state. + STOPPED = "Stopped" + #: The job is currently in the Deleting state. + DELETING = "Deleting" + #: The job is currently in the Failed state. + FAILED = "Failed" + #: The job is currently in the Degraded state. + DEGRADED = "Degraded" + #: The job is currently in the Restarting state. + RESTARTING = "Restarting" + #: The job is currently in the Scaling state. + SCALING = "Scaling" class JobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Describes the type of the job. Valid modes are ``Cloud`` and 'Edge'. @@ -141,7 +165,15 @@ class OutputStartMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): CUSTOM_TIME = "CustomTime" LAST_OUTPUT_EVENT_TIME = "LastOutputEventTime" -class StreamingJobSkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class RefreshType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Indicates the type of data refresh option. + """ + + STATIC = "Static" + REFRESH_PERIODICALLY_WITH_FULL = "RefreshPeriodicallyWithFull" + REFRESH_PERIODICALLY_WITH_DELTA = "RefreshPeriodicallyWithDelta" + +class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The name of the SKU. Required on PUT (CreateOrReplace) requests. """ diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py index a247559efb05..72cfdc41ec92 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py @@ -6,24 +6,24 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._functions_operations import FunctionsOperations +from ._operations import Operations +from ._streaming_jobs_operations import StreamingJobsOperations from ._inputs_operations import InputsOperations from ._outputs_operations import OutputsOperations -from ._streaming_jobs_operations import StreamingJobsOperations -from ._subscriptions_operations import SubscriptionsOperations from ._transformations_operations import TransformationsOperations -from ._operations import Operations +from ._functions_operations import FunctionsOperations +from ._subscriptions_operations import SubscriptionsOperations from ._clusters_operations import ClustersOperations from ._private_endpoints_operations import PrivateEndpointsOperations __all__ = [ - 'FunctionsOperations', + 'Operations', + 'StreamingJobsOperations', 'InputsOperations', 'OutputsOperations', - 'StreamingJobsOperations', - 'SubscriptionsOperations', 'TransformationsOperations', - 'Operations', + 'FunctionsOperations', + 'SubscriptionsOperations', 'ClustersOperations', 'PrivateEndpointsOperations', ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py index 2b0d23f60677..1a999a94c6f2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class ClustersOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -51,18 +51,18 @@ def _create_or_update_initial( self, resource_group_name, # type: str cluster_name, # type: str - cluster, # type: "models.Cluster" + cluster, # type: "_models.Cluster" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Cluster" - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + # type: (...) 
-> "_models.Cluster" + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -97,7 +97,7 @@ def _create_or_update_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -116,12 +116,12 @@ def begin_create_or_update( self, resource_group_name, # type: str cluster_name, # type: str - cluster, # type: "models.Cluster" + cluster, # type: "_models.Cluster" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.Cluster"] + # type: (...) -> LROPoller["_models.Cluster"] """Creates a Stream Analytics Cluster or replaces an already existing cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -140,8 +140,8 @@ def begin_create_or_update( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either Cluster or the result of cls(response) @@ -149,7 +149,7 @@ def begin_create_or_update( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -176,7 +176,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -194,17 +200,17 @@ def _update_initial( self, resource_group_name, # type: str cluster_name, # type: str - cluster, # type: "models.Cluster" + cluster, # type: "_models.Cluster" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Optional["models.Cluster"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + # type: (...) -> Optional["_models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -237,7 +243,7 @@ def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -254,11 +260,11 @@ def begin_update( self, resource_group_name, # type: str cluster_name, # type: str - cluster, # type: "models.Cluster" + cluster, # type: "_models.Cluster" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.Cluster"] + # type: (...) -> LROPoller["_models.Cluster"] """Updates an existing cluster. This can be used to partially update (ie. update one or two properties) a cluster without affecting the rest of the cluster definition. @@ -275,8 +281,8 @@ def begin_update( :type if_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either Cluster or the result of cls(response) @@ -284,7 +290,7 @@ def begin_update( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -310,7 +316,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -330,7 +342,7 @@ def get( cluster_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.Cluster" + # type: (...) -> "_models.Cluster" """Gets information about the specified cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -342,12 +354,12 @@ def get( :rtype: ~stream_analytics_management_client.models.Cluster :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Cluster"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -373,7 +385,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Cluster', pipeline_response) @@ -396,7 +408,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -422,7 +434,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -445,8 +457,8 @@ def begin_delete( :type cluster_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str 
continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -475,7 +487,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -493,7 +511,7 @@ def list_by_subscription( self, **kwargs # type: Any ): - # type: (...) -> Iterable["models.ClusterListResult"] + # type: (...) -> Iterable["_models.ClusterListResult"] """Lists all of the clusters in the given subscription. :keyword callable cls: A custom type or function that will be passed the direct response @@ -501,12 +519,12 @@ def list_by_subscription( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -546,7 +564,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -562,7 +580,7 @@ def list_by_resource_group( resource_group_name, # type: str **kwargs # type: Any ): - # type: (...) -> Iterable["models.ClusterListResult"] + # type: (...) -> Iterable["_models.ClusterListResult"] """Lists all of the clusters in the given resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. 
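# A minimal usage sketch for the cluster list operations above, assuming
# azure-identity is installed; the subscription ID and resource group name
# below are placeholders, not values taken from this diff.
from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Both list methods return a lazily-paged iterable; each item is a Cluster model.
for cluster in client.clusters.list_by_resource_group("my-resource-group"):
    print(cluster.name, cluster.sku.name if cluster.sku else None)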
@@ -572,12 +590,12 @@ def list_by_resource_group( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -618,7 +636,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -635,7 +653,7 @@ def list_streaming_jobs( cluster_name, # type: str **kwargs # type: Any ): - # type: (...) -> Iterable["models.ClusterJobListResult"] + # type: (...) -> Iterable["_models.ClusterJobListResult"] """Lists all of the streaming jobs in the given cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -647,12 +665,12 @@ def list_streaming_jobs( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.ClusterJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ClusterJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -694,7 +712,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py index 31063c85850b..79af3ee0e2cb 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class FunctionsOperations(object): :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,12 +52,12 @@ def create_or_replace( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function, # type: "models.Function" + function, # type: "_models.Function" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Function" + # type: (...) -> "_models.Function" """Creates a function or replaces an already existing function under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -81,12 +81,12 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -122,7 +122,8 @@ def create_or_replace( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -144,11 +145,11 @@ def update( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function, # type: "models.Function" + function, # type: "_models.Function" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Function" + # type: (...) -> "_models.Function" """Updates an existing function under an existing streaming job. This can be used to partially update (ie. update one or two properties) a function without affecting the rest the job or function definition. 
@@ -173,12 +174,12 @@ def update( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -212,7 +213,8 @@ def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -250,7 +252,8 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -268,6 +271,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -275,7 +279,8 @@ def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -289,7 +294,7 @@ def get( function_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.Function" + # type: (...) -> "_models.Function" """Gets details about the specified function. :param resource_group_name: The name of the resource group. The name is case insensitive. 
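# With the failsafe_deserialize change above, failed responses carry the
# deserialized Error model on the raised exception. A sketch of handling that
# around the get operation (names are placeholders; `client` as before):
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError

try:
    function = client.functions.get("my-rg", "my-job", "my-function")
except ResourceNotFoundError:
    function = None  # 404 is mapped separately by the operation's error_map
except HttpResponseError as exc:
    # exc.model holds the Error model when the payload could be deserialized;
    # the raw response remains available on exc.response.
    print(exc.message)
    raise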
@@ -303,12 +308,12 @@ def get( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -335,7 +340,8 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -354,7 +360,7 @@ def list_by_streaming_job( select=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.FunctionListResult"] + # type: (...) -> Iterable["_models.FunctionListResult"] """Lists all of the functions under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -370,12 +376,12 @@ def list_by_streaming_job( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.FunctionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FunctionListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.FunctionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -419,8 +425,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -434,16 +441,16 @@ def _test_initial( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function=None, # type: Optional["models.Function"] + function=None, # type: Optional["_models.Function"] **kwargs # type: Any ): - # type: (...) -> Optional["models.ResourceTestStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + # type: (...) 
-> Optional["_models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -478,7 +485,8 @@ def _test_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -495,10 +503,10 @@ def begin_test( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function=None, # type: Optional["models.Function"] + function=None, # type: Optional["_models.Function"] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.ResourceTestStatus"] + # type: (...) -> LROPoller["_models.ResourceTestStatus"] """Tests if the information provided for a function is valid. This can range from testing the connection to the underlying web service behind the function or making sure the function code provided is syntactically correct. @@ -517,8 +525,8 @@ def begin_test( :type function: ~stream_analytics_management_client.models.Function :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -526,7 +534,7 @@ def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -552,7 +560,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'functionName': self._serialize.url("function_name", function_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -571,10 +586,10 @@ def retrieve_default_definition( resource_group_name, # type: str job_name, # type: str function_name, # type: str - function_retrieve_default_definition_parameters=None, # type: Optional["models.FunctionRetrieveDefaultDefinitionParameters"] + function_retrieve_default_definition_parameters=None, # type: Optional["_models.FunctionRetrieveDefaultDefinitionParameters"] **kwargs # type: Any ): - # type: (...) -> "models.Function" + # type: (...) -> "_models.Function" """Retrieves the default definition of a function based on the parameters specified. :param resource_group_name: The name of the resource group. The name is case insensitive. 
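# A sketch of driving the begin_test long-running operation documented above:
# it returns an LROPoller whose result() is a ResourceTestStatus. The literal
# status strings are service-defined and shown here only as examples.
poller = client.functions.begin_test("my-rg", "my-job", "my-function")
test_status = poller.result()  # blocks until done, honoring Retry-After between polls
print(test_status.status)      # e.g. "TestSucceeded" or "TestFailed"
if test_status.error is not None:
    print(test_status.error.message)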
@@ -591,12 +606,12 @@ def retrieve_default_definition( :rtype: ~stream_analytics_management_client.models.Function :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Function"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Function"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -631,7 +646,8 @@ def retrieve_default_definition( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Function', pipeline_response) @@ -639,4 +655,4 @@ def retrieve_default_definition( return cls(pipeline_response, deserialized, {}) return deserialized - retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/RetrieveDefaultDefinition'} # type: ignore + retrieve_default_definition.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py index 890d33f1b8b1..0aad5893d5b7 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class InputsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,12 +52,12 @@ def create_or_replace( resource_group_name, # type: str job_name, # type: str input_name, # type: str - input, # type: "models.Input" + input, # type: "_models.Input" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Input" + # type: (...) -> "_models.Input" """Creates an input or replaces an already existing input under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
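# A sketch of the inputs create_or_replace operation documented here, assuming
# the blob/JSON input models defined elsewhere in this package; account names,
# keys, and path patterns are placeholders.
from azure.mgmt.streamanalytics.models import (
    BlobStreamInputDataSource,
    Input,
    JsonSerialization,
    StorageAccount,
    StreamInputProperties,
)

blob_input = Input(
    properties=StreamInputProperties(
        datasource=BlobStreamInputDataSource(
            storage_accounts=[StorageAccount(account_name="mystorageacct", account_key="<key>")],
            container="events",
            path_pattern="{date}/{time}",
        ),
        serialization=JsonSerialization(encoding="UTF8"),
    )
)
created = client.inputs.create_or_replace("my-rg", "my-job", "raw-events", blob_input)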
@@ -80,12 +80,12 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -121,7 +121,8 @@ def create_or_replace( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -143,11 +144,11 @@ def update( resource_group_name, # type: str job_name, # type: str input_name, # type: str - input, # type: "models.Input" + input, # type: "_models.Input" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Input" + # type: (...) -> "_models.Input" """Updates an existing input under an existing streaming job. This can be used to partially update (ie. update one or two properties) an input without affecting the rest the job or input definition. @@ -171,12 +172,12 @@ def update( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -210,7 +211,8 @@ def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -248,7 +250,8 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -266,6 +269,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -273,7 +277,8 @@ def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = 
self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -287,7 +292,7 @@ def get( input_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.Input" + # type: (...) -> "_models.Input" """Gets details about the specified input. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -301,12 +306,12 @@ def get( :rtype: ~stream_analytics_management_client.models.Input :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Input"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Input"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -333,7 +338,8 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -352,7 +358,7 @@ def list_by_streaming_job( select=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.InputListResult"] + # type: (...) -> Iterable["_models.InputListResult"] """Lists all of the inputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -368,12 +374,12 @@ def list_by_streaming_job( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.InputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.InputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.InputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -417,8 +423,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -432,16 +439,16 @@ def _test_initial( resource_group_name, # type: str job_name, # type: str input_name, # type: str - input=None, # type: Optional["models.Input"] + input=None, # type: Optional["_models.Input"] **kwargs # type: Any ): - # type: (...) -> Optional["models.ResourceTestStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + # type: (...) 
-> Optional["_models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -476,7 +483,8 @@ def _test_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -493,10 +501,10 @@ def begin_test( resource_group_name, # type: str job_name, # type: str input_name, # type: str - input=None, # type: Optional["models.Input"] + input=None, # type: Optional["_models.Input"] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.ResourceTestStatus"] + # type: (...) -> LROPoller["_models.ResourceTestStatus"] """Tests whether an input’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -514,8 +522,8 @@ def begin_test( :type input: ~stream_analytics_management_client.models.Input :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -523,7 +531,7 @@ def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -549,7 +557,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'inputName': self._serialize.url("input_name", input_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py index 1a63db586859..51dee0820a8d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -37,7 +37,7 @@ class Operations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,7 +49,7 @@ def list( self, **kwargs # type: Any ): - # type: (...) -> Iterable["models.OperationListResult"] + # type: (...) -> Iterable["_models.OperationListResult"] """Lists all of the available Stream Analytics related operations. 
:keyword callable cls: A custom type or function that will be passed the direct response @@ -57,12 +57,12 @@ def list( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -98,8 +98,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py index a18f17686979..1dded79f5d86 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class OutputsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,12 +52,12 @@ def create_or_replace( resource_group_name, # type: str job_name, # type: str output_name, # type: str - output, # type: "models.Output" + output, # type: "_models.Output" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Output" + # type: (...) -> "_models.Output" """Creates an output or replaces an already existing output under an existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. 
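The hunks above route every failure through failsafe_deserialize, so the service's Error model now rides on the raised exception instead of being lost (failsafe here means a body that does not parse yields None rather than raising a second error mid-handling). A minimal caller-side sketch, assuming a client built with azure-identity; the subscription id and resource names below are placeholders:

from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

try:
    # operations.list() returns an ItemPaged; a failing page now raises an
    # HttpResponseError whose .model is the deserialized _models.Error.
    for op in client.operations.list():
        print(op.name)
except HttpResponseError as exc:
    if exc.model is not None:  # populated via the model= kwarg added in this diff
        print(exc.model)
    raise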
@@ -81,12 +81,12 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -122,7 +122,8 @@ def create_or_replace( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -144,11 +145,11 @@ def update( resource_group_name, # type: str job_name, # type: str output_name, # type: str - output, # type: "models.Output" + output, # type: "_models.Output" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Output" + # type: (...) -> "_models.Output" """Updates an existing output under an existing streaming job. This can be used to partially update (i.e. update one or two properties) an output without affecting the rest of the job or output definition. @@ -173,12 +174,12 @@ def update( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -212,7 +213,8 @@ def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -250,7 +252,8 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -268,6 +271,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -275,7 +279,8 @@ def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error =
self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -289,7 +294,7 @@ def get( output_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.Output" + # type: (...) -> "_models.Output" """Gets details about the specified output. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -303,12 +308,12 @@ def get( :rtype: ~stream_analytics_management_client.models.Output :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Output"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Output"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -335,7 +340,8 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -354,7 +360,7 @@ def list_by_streaming_job( select=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.OutputListResult"] + # type: (...) -> Iterable["_models.OutputListResult"] """Lists all of the outputs under the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -370,12 +376,12 @@ def list_by_streaming_job( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.OutputListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OutputListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.OutputListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -419,8 +425,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -434,16 +441,16 @@ def _test_initial( resource_group_name, # type: str job_name, # type: str output_name, # type: str - output=None, # type: Optional["models.Output"] + output=None, # type: Optional["_models.Output"] **kwargs # type: Any ): - # type: (...) -> Optional["models.ResourceTestStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ResourceTestStatus"]] + # type: (...) 
-> Optional["_models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ResourceTestStatus"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -478,7 +485,8 @@ def _test_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -495,10 +503,10 @@ def begin_test( resource_group_name, # type: str job_name, # type: str output_name, # type: str - output=None, # type: Optional["models.Output"] + output=None, # type: Optional["_models.Output"] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.ResourceTestStatus"] + # type: (...) -> LROPoller["_models.ResourceTestStatus"] """Tests whether an output’s datasource is reachable and usable by the Azure Stream Analytics service. @@ -516,8 +524,8 @@ def begin_test( :type output: ~stream_analytics_management_client.models.Output :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either ResourceTestStatus or the result of cls(response) @@ -525,7 +533,7 @@ def begin_test( :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.ResourceTestStatus"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceTestStatus"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -551,7 +559,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'outputName': self._serialize.url("output_name", output_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py index e194d816d90c..44eacae87e72 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class PrivateEndpointsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,12 +52,12 @@ def create_or_update( resource_group_name, # type: str cluster_name, # type: str private_endpoint_name, # type: str - private_endpoint, # type: "models.PrivateEndpoint" + private_endpoint, # type: "_models.PrivateEndpoint" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.PrivateEndpoint" + # type: (...) -> "_models.PrivateEndpoint" """Creates a Stream Analytics Private Endpoint or replaces an already existing Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. 
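A hedged usage sketch for the outputs begin_test LRO above, reusing the client from the earlier sketch; resource names are placeholders. The polling keyword keeps its three accepted forms (True, False, or a PollingMethod instance); only the default ARMPolling now receives the path_format_arguments built in the diff:

poller = client.outputs.begin_test(
    resource_group_name="<rg>",
    job_name="<job>",
    output_name="<output>",
    output=None,  # None tests the output exactly as already defined on the job
)
status = poller.result()  # blocks until the LRO finishes; a ResourceTestStatus
print(status.status)

# polling=False returns right after the initial 200/202 response; any
# azure.core.polling.PollingMethod instance may replace the default ARMPolling.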
@@ -81,12 +81,12 @@ def create_or_update( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -122,7 +122,7 @@ def create_or_update( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -144,7 +144,7 @@ def get( private_endpoint_name, # type: str **kwargs # type: Any ): - # type: (...) -> "models.PrivateEndpoint" + # type: (...) -> "_models.PrivateEndpoint" """Gets information about the specified Private Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -158,12 +158,12 @@ def get( :rtype: ~stream_analytics_management_client.models.PrivateEndpoint :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpoint"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpoint"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -190,7 +190,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpoint', pipeline_response) @@ -214,7 +214,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -241,7 +241,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -267,8 +267,8 @@ def begin_delete( :type private_endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. 
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) @@ -298,7 +298,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'privateEndpointName': self._serialize.url("private_endpoint_name", private_endpoint_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -318,7 +325,7 @@ def list_by_cluster( cluster_name, # type: str **kwargs # type: Any ): - # type: (...) -> Iterable["models.PrivateEndpointListResult"] + # type: (...) -> Iterable["_models.PrivateEndpointListResult"] """Lists the private endpoints in the cluster. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -330,12 +337,12 @@ def list_by_cluster( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.PrivateEndpointListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-03-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -377,7 +384,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(models.Error, response) + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py index f3228537a7ed..7e2044b6b1d9 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models +from .. 
import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -39,7 +39,7 @@ class StreamingJobsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -51,18 +51,18 @@ def _create_or_replace_initial( self, resource_group_name, # type: str job_name, # type: str - streaming_job, # type: "models.StreamingJob" + streaming_job, # type: "_models.StreamingJob" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.StreamingJob" - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + # type: (...) -> "_models.StreamingJob" + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -97,7 +97,8 @@ def _create_or_replace_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -118,12 +119,12 @@ def begin_create_or_replace( self, resource_group_name, # type: str job_name, # type: str - streaming_job, # type: "models.StreamingJob" + streaming_job, # type: "_models.StreamingJob" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> LROPoller["models.StreamingJob"] + # type: (...) -> LROPoller["_models.StreamingJob"] """Creates a streaming job or replaces an already existing streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -143,8 +144,8 @@ def begin_create_or_replace( :type if_none_match: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either StreamingJob or the result of cls(response) @@ -152,7 +153,7 @@ :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -182,7 +183,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, response_headers) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -200,11 +207,11 @@ def update( self, resource_group_name, # type: str job_name, # type: str - streaming_job, # type: "models.StreamingJob" + streaming_job, # type: "_models.StreamingJob" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.StreamingJob" + # type: (...) -> "_models.StreamingJob" """Updates an existing streaming job. This can be used to partially update (i.e. update one or two properties) a streaming job without affecting the rest of the job definition.
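A short sketch of begin_create_or_replace from the caller's side, again with the placeholder client; the job body is illustrative (model names as generated for 2020-03-01), not a complete job definition:

from azure.mgmt.streamanalytics.models import Sku, StreamingJob

job = StreamingJob(
    location="West US",
    sku=Sku(name="Standard"),
    tags={"env": "test"},
)
poller = client.streaming_jobs.begin_create_or_replace(
    resource_group_name="<rg>",
    job_name="<job>",
    streaming_job=job,
    if_none_match="*",  # create only; an existing job yields 412 Precondition Failed
)
created = poller.result()  # StreamingJob with server-populated fields
print(created.id, created.provisioning_state)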
@@ -226,12 +233,12 @@ def update( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -264,7 +271,8 @@ def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -288,7 +296,8 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore @@ -305,6 +314,7 @@ def _delete_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -312,7 +322,8 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -334,8 +345,8 @@ def begin_delete( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -364,7 +375,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -385,7 +402,7 @@ def get( expand=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.StreamingJob" + # type: (...) -> "_models.StreamingJob" """Gets details about the specified streaming job. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -402,12 +419,12 @@ def get( :rtype: ~stream_analytics_management_client.models.StreamingJob :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJob"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJob"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -435,7 +452,8 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -453,7 +471,7 @@ def list_by_resource_group( expand=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.StreamingJobListResult"] + # type: (...) -> Iterable["_models.StreamingJobListResult"] """Lists all of the streaming jobs in the specified resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. 
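The get/list hunks keep the $expand OData parameter; a sketch with the same placeholder client (the expand list reflects the job properties the full docstring enumerates for this API version):

job = client.streaming_jobs.get(
    resource_group_name="<rg>",
    job_name="<job>",
    expand="inputs,transformation,outputs,functions",
)
print(job.job_state, job.etag)

# list_by_resource_group returns an ItemPaged that follows nextLink pages;
# per the hunk below, a failing page raises with the deserialized Error model.
for j in client.streaming_jobs.list_by_resource_group("<rg>"):
    print(j.name)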
@@ -468,12 +486,12 @@ def list_by_resource_group( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -516,8 +534,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -531,7 +550,7 @@ def list( expand=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["models.StreamingJobListResult"] + # type: (...) -> Iterable["_models.StreamingJobListResult"] """Lists all of the streaming jobs in the given subscription. :param expand: The $expand OData query parameter. This is a comma-separated list of additional @@ -544,12 +563,12 @@ def list( :rtype: ~azure.core.paging.ItemPaged[~stream_analytics_management_client.models.StreamingJobListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.StreamingJobListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.StreamingJobListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -591,8 +610,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -605,7 +625,7 @@ def _start_initial( self, resource_group_name, # type: str job_name, # type: str - start_job_parameters=None, # type: Optional["models.StartStreamingJobParameters"] + start_job_parameters=None, # type: Optional["_models.StartStreamingJobParameters"] **kwargs # type: Any ): # type: (...) 
-> None @@ -614,8 +634,9 @@ def _start_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -633,6 +654,7 @@ def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if start_job_parameters is not None: @@ -646,7 +668,8 @@ def _start_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -657,7 +680,7 @@ def begin_start( self, resource_group_name, # type: str job_name, # type: str - start_job_parameters=None, # type: Optional["models.StartStreamingJobParameters"] + start_job_parameters=None, # type: Optional["_models.StartStreamingJobParameters"] **kwargs # type: Any ): # type: (...) -> LROPoller[None] @@ -672,8 +695,8 @@ def begin_start( :type start_job_parameters: ~stream_analytics_management_client.models.StartStreamingJobParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -703,7 +726,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -729,7 +758,8 @@ def _stop_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -746,6 +776,7 @@ def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -753,7 +784,8 @@ def _stop_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -776,8 +808,8 @@ def begin_stop( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -806,7 +838,13 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -819,3 +857,127 @@ def get_long_running_output(pipeline_response): else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop'} # type: ignore + + def _scale_initial( + self, + resource_group_name, # type: str + job_name, # type: str + scale_job_parameters=None, # type: Optional["_models.ScaleStreamingJobParameters"] + **kwargs # type: Any + ): + # type: (...) -> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-03-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._scale_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if scale_job_parameters is not None: + body_content = self._serialize.body(scale_job_parameters, 'ScaleStreamingJobParameters') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _scale_initial.metadata = 
{'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale'} # type: ignore + + def begin_scale( + self, + resource_group_name, # type: str + job_name, # type: str + scale_job_parameters=None, # type: Optional["_models.ScaleStreamingJobParameters"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Scales a streaming job when the job is running. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param job_name: The name of the streaming job. + :type job_name: str + :param scale_job_parameters: Parameters applicable to a scale streaming job operation. + :type scale_job_parameters: ~stream_analytics_management_client.models.ScaleStreamingJobParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._scale_initial( + resource_group_name=resource_group_name, + job_name=job_name, + scale_job_parameters=scale_job_parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'jobName': self._serialize.url("job_name", job_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_scale.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale'} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py index 81d2bbf70cdb..2617149c79d8 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py @@ -13,7 +13,7 @@ from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -36,7 +36,7 @@ class SubscriptionsOperations(object): :param deserializer: An object model deserializer. """ - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,7 +49,7 @@ def list_quotas( location, # type: str **kwargs # type: Any ): - # type: (...) -> "models.SubscriptionQuotasListResult" + # type: (...) -> "_models.SubscriptionQuotasListResult" """Retrieves the subscription's current quota information in a particular region. :param location: The region in which to retrieve the subscription's quota information. You can @@ -61,12 +61,12 @@ def list_quotas( :rtype: ~stream_analytics_management_client.models.SubscriptionQuotasListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SubscriptionQuotasListResult"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.SubscriptionQuotasListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -91,7 +91,8 @@ def list_quotas( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('SubscriptionQuotasListResult', pipeline_response) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py index 49f318e3a748..924569d78143 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py @@ -13,7 +13,7 @@ from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat -from .. import models +from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -36,7 +36,7 @@ class TransformationsOperations(object): :param deserializer: An object model deserializer. 
""" - models = models + models = _models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,12 +49,12 @@ def create_or_replace( resource_group_name, # type: str job_name, # type: str transformation_name, # type: str - transformation, # type: "models.Transformation" + transformation, # type: "_models.Transformation" if_match=None, # type: Optional[str] if_none_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Transformation" + # type: (...) -> "_models.Transformation" """Creates a transformation or replaces an already existing transformation under an existing streaming job. @@ -80,12 +80,12 @@ def create_or_replace( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -121,7 +121,8 @@ def create_or_replace( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: @@ -143,11 +144,11 @@ def update( resource_group_name, # type: str job_name, # type: str transformation_name, # type: str - transformation, # type: "models.Transformation" + transformation, # type: "_models.Transformation" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.Transformation" + # type: (...) -> "_models.Transformation" """Updates an existing transformation under an existing streaming job. This can be used to partially update (ie. update one or two properties) a transformation without affecting the rest the job or transformation definition. @@ -173,12 +174,12 @@ def update( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -212,7 +213,8 @@ def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) @@ -231,7 +233,7 @@ def get( transformation_name, # type: str **kwargs # type: Any ): - # type: (...) 
-> "models.Transformation" + # type: (...) -> "_models.Transformation" """Gets details about the specified transformation. :param resource_group_name: The name of the resource group. The name is case insensitive. @@ -245,12 +247,12 @@ def get( :rtype: ~stream_analytics_management_client.models.Transformation :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Transformation"] + cls = kwargs.pop('cls', None) # type: ClsType["_models.Transformation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2017-04-01-preview" + api_version = "2020-03-01" accept = "application/json" # Construct URL @@ -277,7 +279,8 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.Error, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))