From 3591492382a986f77621b94331257ea0ae9e2d6f Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Thu, 8 Jul 2021 05:39:18 +0000 Subject: [PATCH] CodeGen from PR 14925 in Azure/azure-rest-api-specs Merge 550824666f8611250ce5240085d5ef293da294fa into c234651f2885c07ff4a77e50c883145949fda5fa --- .../azure-mgmt-datafactory/MANIFEST.in | 1 + .../azure-mgmt-datafactory/_meta.json | 11 + .../_data_factory_management_client.py | 35 + .../azure/mgmt/datafactory/_metadata.json | 122 + .../azure/mgmt/datafactory/_version.py | 2 +- .../aio/_data_factory_management_client.py | 34 + .../datafactory/aio/operations/__init__.py | 6 + .../operations/_activity_runs_operations.py | 2 +- .../_data_flow_debug_session_operations.py | 22 +- .../aio/operations/_data_flows_operations.py | 8 +- .../aio/operations/_datasets_operations.py | 8 +- .../_exposure_control_operations.py | 6 +- .../aio/operations/_factories_operations.py | 18 +- .../_integration_runtime_nodes_operations.py | 8 +- ...tion_runtime_object_metadata_operations.py | 10 +- .../_integration_runtimes_operations.py | 106 +- .../operations/_linked_services_operations.py | 8 +- .../_managed_private_endpoints_operations.py | 8 +- .../_managed_virtual_networks_operations.py | 6 +- .../datafactory/aio/operations/_operations.py | 2 +- .../operations/_pipeline_runs_operations.py | 6 +- .../aio/operations/_pipelines_operations.py | 14 +- ...rivate_end_point_connections_operations.py | 116 + ..._private_endpoint_connection_operations.py | 245 + .../_private_link_resources_operations.py | 99 + .../operations/_trigger_runs_operations.py | 6 +- .../aio/operations/_triggers_operations.py | 44 +- .../azure/mgmt/datafactory/models/__init__.py | 64 +- .../_data_factory_management_client_enums.py | 46 +- .../azure/mgmt/datafactory/models/_models.py | 7145 +++++---- .../mgmt/datafactory/models/_models_py3.py | 13271 +++++++++------- .../mgmt/datafactory/operations/__init__.py | 6 + .../_data_flow_debug_session_operations.py | 8 +- ...tion_runtime_object_metadata_operations.py | 4 +- .../_integration_runtimes_operations.py | 71 +- .../operations/_pipelines_operations.py | 4 +- ...rivate_end_point_connections_operations.py | 121 + ..._private_endpoint_connection_operations.py | 252 + .../_private_link_resources_operations.py | 104 + .../operations/_triggers_operations.py | 16 +- 40 files changed, 13060 insertions(+), 9005 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/_meta.json create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in index a3cb07df8765..3a9b6517412b 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in +++ b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in @@ -1,3 +1,4 @@ +include _meta.json recursive-include tests *.py *.yaml include *.md include azure/__init__.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json new file mode 100644 index 000000000000..9e1a23ddfe54 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json @@ -0,0 +1,11 @@ +{ + "autorest": "3.4.2", + "use": [ + "@autorest/python@5.8.1", + "@autorest/modelerfour@4.19.2" + ], + "commit": "a95fdd2d4329a45a062e63a6caec445ffa1c8a2f", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.1 --use=@autorest/modelerfour@4.19.2 --version=3.4.2", + "readme": "specification/datafactory/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py index ad3cab87fd33..8d4043c2530d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py @@ -16,6 +16,7 @@ from typing import Any, Optional from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse from ._configuration import DataFactoryManagementClientConfiguration from .operations import Operations @@ -35,6 +36,9 @@ from .operations import DataFlowDebugSessionOperations from .operations import ManagedVirtualNetworksOperations from .operations import ManagedPrivateEndpointsOperations +from .operations import PrivateEndPointConnectionsOperations +from .operations import PrivateEndpointConnectionOperations +from .operations import PrivateLinkResourcesOperations from . import models @@ -75,6 +79,12 @@ class DataFactoryManagementClient(object): :vartype managed_virtual_networks: azure.mgmt.datafactory.operations.ManagedVirtualNetworksOperations :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations :vartype managed_private_endpoints: azure.mgmt.datafactory.operations.ManagedPrivateEndpointsOperations + :ivar private_end_point_connections: PrivateEndPointConnectionsOperations operations + :vartype private_end_point_connections: azure.mgmt.datafactory.operations.PrivateEndPointConnectionsOperations + :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations + :vartype private_endpoint_connection: azure.mgmt.datafactory.operations.PrivateEndpointConnectionOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The subscription identifier. 
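A minimal sketch of how the expanded client surface is expected to be used, assuming azure-identity is installed for credentials; the three new operation-group attribute names come from the ivars documented above, and the constructor call shape matches the generated metadata ("call": "credential, subscription_id"):

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

credential = DefaultAzureCredential()
client = DataFactoryManagementClient(credential, "<subscription-id>")

# The three new operation groups hang off the client like the existing ones:
print(type(client.private_end_point_connections).__name__)
print(type(client.private_endpoint_connection).__name__)
print(type(client.private_link_resources).__name__)
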
@@ -98,6 +108,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.operations = Operations( @@ -134,6 +145,30 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.managed_private_endpoints = ManagedPrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize) + self.private_end_point_connections = PrivateEndPointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connection = PrivateEndpointConnectionOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, http_request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response def close(self): # type: () -> None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json new file mode 100644 index 000000000000..dd5e00735108 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json @@ -0,0 +1,122 @@ +{ + "chosen_version": "2018-06-01", + "total_api_version_list": ["2018-06-01"], + "client": { + "name": "DataFactoryManagementClient", + "filename": "_data_factory_management_client", + "description": "The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services.", + "base_url": "\u0027https://management.azure.com\u0027", + "custom_base_url": null, + "azure_arm": true, + "has_lro_operations": true, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"DataFactoryManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", 
\"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"DataFactoryManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}" + }, + "global_parameters": { + "sync": { + "credential": { + "signature": "credential, # type: \"TokenCredential\"", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials.TokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id, # type: str", + "description": "The subscription identifier.", + "docstring_type": "str", + "required": true + } + }, + "async": { + "credential": { + "signature": "credential: \"AsyncTokenCredential\",", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id: str,", + "description": "The subscription identifier.", + "docstring_type": "str", + "required": true + } + }, + "constant": { + }, + "call": "credential, subscription_id", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=None, # type: Optional[str]", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: Optional[str] = None,", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } + }, + "config": { + "credential": true, + "credential_scopes": ["https://management.azure.com/.default"], + "credential_default_policy_type": "BearerTokenCredentialPolicy", + "credential_default_policy_type_has_async_version": true, + "credential_key_header_name": null, + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, 
\"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" + }, + "operation_groups": { + "operations": "Operations", + "factories": "FactoriesOperations", + "exposure_control": "ExposureControlOperations", + "integration_runtimes": "IntegrationRuntimesOperations", + "integration_runtime_object_metadata": "IntegrationRuntimeObjectMetadataOperations", + "integration_runtime_nodes": "IntegrationRuntimeNodesOperations", + "linked_services": "LinkedServicesOperations", + "datasets": "DatasetsOperations", + "pipelines": "PipelinesOperations", + "pipeline_runs": "PipelineRunsOperations", + "activity_runs": "ActivityRunsOperations", + "triggers": "TriggersOperations", + "trigger_runs": "TriggerRunsOperations", + "data_flows": "DataFlowsOperations", + "data_flow_debug_session": "DataFlowDebugSessionOperations", + "managed_virtual_networks": "ManagedVirtualNetworksOperations", + "managed_private_endpoints": "ManagedPrivateEndpointsOperations", + "private_end_point_connections": "PrivateEndPointConnectionsOperations", + "private_endpoint_connection": "PrivateEndpointConnectionOperations", + "private_link_resources": "PrivateLinkResourcesOperations" + } +} \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py index 59deb8c7263b..c47f66669f1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.1.0" +VERSION = "1.0.0" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py index b7e631bb8c8f..b0adc1172540 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py @@ -8,6 +8,7 @@ from typing import Any, Optional, TYPE_CHECKING +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer @@ -33,6 +34,9 @@ from .operations import DataFlowDebugSessionOperations from .operations import ManagedVirtualNetworksOperations from .operations import ManagedPrivateEndpointsOperations +from .operations import PrivateEndPointConnectionsOperations +from .operations import PrivateEndpointConnectionOperations +from .operations import PrivateLinkResourcesOperations from .. 
import models @@ -73,6 +77,12 @@ class DataFactoryManagementClient(object): :vartype managed_virtual_networks: azure.mgmt.datafactory.aio.operations.ManagedVirtualNetworksOperations :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations :vartype managed_private_endpoints: azure.mgmt.datafactory.aio.operations.ManagedPrivateEndpointsOperations + :ivar private_end_point_connections: PrivateEndPointConnectionsOperations operations + :vartype private_end_point_connections: azure.mgmt.datafactory.aio.operations.PrivateEndPointConnectionsOperations + :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations + :vartype private_endpoint_connection: azure.mgmt.datafactory.aio.operations.PrivateEndpointConnectionOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: azure.mgmt.datafactory.aio.operations.PrivateLinkResourcesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription identifier. @@ -95,6 +105,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.operations = Operations( @@ -131,6 +142,29 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.managed_private_endpoints = ManagedPrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize) + self.private_end_point_connections = PrivateEndPointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connection = PrivateEndpointConnectionOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response async def close(self) -> None: await self._client.close() diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py index 95d268579097..c1da8c996a37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py @@ -23,6 +23,9 @@ from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations +from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations +from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations __all__ = [ 'Operations', @@ -42,4 +45,7 @@ 'DataFlowDebugSessionOperations', 'ManagedVirtualNetworksOperations', 'ManagedPrivateEndpointsOperations', + 'PrivateEndPointConnectionsOperations', + 'PrivateEndpointConnectionOperations', + 'PrivateLinkResourcesOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py index 1e1a2747cead..218ae0a7fe2a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py @@ -46,7 +46,7 @@ async def query_by_pipeline_run( factory_name: str, run_id: str, filter_parameters: "_models.RunFilterParameters", - **kwargs + **kwargs: Any ) -> "_models.ActivityRunsQueryResponse": """Query activity runs based on input filter conditions. 
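A usage sketch for the async query_by_pipeline_run signature above. The last_updated_after/last_updated_before filter window is an assumption about the RunFilterParameters model (it is not spelled out in this diff), as are the activity_name/status fields on the returned items:

import asyncio
from datetime import datetime, timedelta

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient
from azure.mgmt.datafactory.models import RunFilterParameters

async def main():
    credential = DefaultAzureCredential()
    async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
        # Assumed RunFilterParameters fields: a one-day lastUpdated window.
        filters = RunFilterParameters(
            last_updated_after=datetime.utcnow() - timedelta(days=1),
            last_updated_before=datetime.utcnow(),
        )
        runs = await client.activity_runs.query_by_pipeline_run(
            "<resource-group>", "<factory>", "<pipeline-run-id>", filters)
        for run in runs.value:
            print(run.activity_name, run.status)
    await credential.close()

asyncio.run(main())
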
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py index 3501e1ba3e8b..db31ffe70764 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -48,7 +48,7 @@ async def _create_initial( resource_group_name: str, factory_name: str, request: "_models.CreateDataFlowDebugSessionRequest", - **kwargs + **kwargs: Any ) -> Optional["_models.CreateDataFlowDebugSessionResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.CreateDataFlowDebugSessionResponse"]] error_map = { @@ -107,7 +107,7 @@ async def begin_create( resource_group_name: str, factory_name: str, request: "_models.CreateDataFlowDebugSessionRequest", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.CreateDataFlowDebugSessionResponse"]: """Creates a data flow debug session. @@ -119,8 +119,8 @@ async def begin_create( :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) @@ -177,7 +177,7 @@ def query_by_factory( self, resource_group_name: str, factory_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.QueryDataFlowDebugSessionsResponse"]: """Query all active data flow debug sessions. @@ -252,7 +252,7 @@ async def add_data_flow( resource_group_name: str, factory_name: str, request: "_models.DataFlowDebugPackage", - **kwargs + **kwargs: Any ) -> "_models.AddDataFlowToDebugSessionResponse": """Add a data flow into debug session. @@ -318,7 +318,7 @@ async def delete( resource_group_name: str, factory_name: str, request: "_models.DeleteDataFlowDebugSessionRequest", - **kwargs + **kwargs: Any ) -> None: """Deletes a data flow debug session. @@ -381,7 +381,7 @@ async def _execute_command_initial( resource_group_name: str, factory_name: str, request: "_models.DataFlowDebugCommandRequest", - **kwargs + **kwargs: Any ) -> Optional["_models.DataFlowDebugCommandResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.DataFlowDebugCommandResponse"]] error_map = { @@ -440,7 +440,7 @@ async def begin_execute_command( resource_group_name: str, factory_name: str, request: "_models.DataFlowDebugCommandRequest", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.DataFlowDebugCommandResponse"]: """Execute a data flow debug command. 
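A sketch of driving one of the long-running operations above through its AsyncLROPoller; the time_to_live field on the request and the session_id field on the response are assumptions made for illustration, not confirmed by this diff:

from azure.mgmt.datafactory.models import CreateDataFlowDebugSessionRequest

async def create_debug_session(client):
    # time_to_live (minutes) is an assumed request field.
    request = CreateDataFlowDebugSessionRequest(time_to_live=60)
    # polling defaults to AsyncARMPolling per the docstring above; polling=False
    # would skip automatic polling and leave status checks to the caller.
    poller = await client.data_flow_debug_session.begin_create(
        "<resource-group>", "<factory>", request)
    response = await poller.result()
    print(response.session_id)  # session_id assumed from the response model
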
@@ -452,8 +452,8 @@ async def begin_execute_command( :type request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py index 543b927eddd3..e1ef45dc21e9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py @@ -48,7 +48,7 @@ async def create_or_update( data_flow_name: str, data_flow: "_models.DataFlowResource", if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.DataFlowResource": """Creates or updates a data flow. @@ -123,7 +123,7 @@ async def get( factory_name: str, data_flow_name: str, if_none_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.DataFlowResource": """Gets a data flow. @@ -190,7 +190,7 @@ async def delete( resource_group_name: str, factory_name: str, data_flow_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a data flow. @@ -248,7 +248,7 @@ def list_by_factory( self, resource_group_name: str, factory_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.DataFlowListResponse"]: """Lists data flows. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py index d624050ab9a4..4e5c8c3a5b7e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py @@ -45,7 +45,7 @@ def list_by_factory( self, resource_group_name: str, factory_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.DatasetListResponse"]: """Lists datasets. @@ -122,7 +122,7 @@ async def create_or_update( dataset_name: str, dataset: "_models.DatasetResource", if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.DatasetResource": """Creates or updates a dataset. @@ -197,7 +197,7 @@ async def get( factory_name: str, dataset_name: str, if_none_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> Optional["_models.DatasetResource"]: """Gets a dataset. @@ -266,7 +266,7 @@ async def delete( resource_group_name: str, factory_name: str, dataset_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a dataset. 
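The get/create_or_update methods in these operation groups carry if_none_match/if_match ETag parameters. A conditional-read sketch against the datasets signature above; that an unchanged entity (HTTP 304) surfaces as None is an inference from the Optional return annotation, not something this diff states:

async def fetch_dataset_if_changed(client, etag=None):
    # if_none_match makes the read conditional on the cached ETag.
    dataset = await client.datasets.get(
        "<resource-group>", "<factory>", "<dataset-name>", if_none_match=etag)
    if dataset is None:
        print("cached copy is still current")
    else:
        print("fetched revision with ETag", dataset.etag)
    return dataset
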
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py index 31c30d97f79b..9ea521937af2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py @@ -44,7 +44,7 @@ async def get_feature_value( self, location_id: str, exposure_control_request: "_models.ExposureControlRequest", - **kwargs + **kwargs: Any ) -> "_models.ExposureControlResponse": """Get exposure control feature for specific location. @@ -107,7 +107,7 @@ async def get_feature_value_by_factory( resource_group_name: str, factory_name: str, exposure_control_request: "_models.ExposureControlRequest", - **kwargs + **kwargs: Any ) -> "_models.ExposureControlResponse": """Get exposure control feature for specific factory. @@ -173,7 +173,7 @@ async def query_feature_values_by_factory( resource_group_name: str, factory_name: str, exposure_control_batch_request: "_models.ExposureControlBatchRequest", - **kwargs + **kwargs: Any ) -> "_models.ExposureControlBatchResponse": """Get list of exposure control features for specific factory. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py index c4bde5d0d9de..04705d6abc97 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.FactoryListResponse"]: """Lists factories under the specified subscription. @@ -111,7 +111,7 @@ async def configure_factory_repo( self, location_id: str, factory_repo_update: "_models.FactoryRepoUpdate", - **kwargs + **kwargs: Any ) -> "_models.Factory": """Updates a factory's repo information. @@ -172,7 +172,7 @@ async def configure_factory_repo( def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.FactoryListResponse"]: """Lists factories. @@ -245,7 +245,7 @@ async def create_or_update( factory_name: str, factory: "_models.Factory", if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.Factory": """Creates or updates a factory. @@ -316,7 +316,7 @@ async def update( resource_group_name: str, factory_name: str, factory_update_parameters: "_models.FactoryUpdateParameters", - **kwargs + **kwargs: Any ) -> "_models.Factory": """Updates a factory. @@ -382,7 +382,7 @@ async def get( resource_group_name: str, factory_name: str, if_none_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> Optional["_models.Factory"]: """Gets a factory. @@ -447,7 +447,7 @@ async def delete( self, resource_group_name: str, factory_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a factory. @@ -503,7 +503,7 @@ async def get_git_hub_access_token( resource_group_name: str, factory_name: str, git_hub_access_token_request: "_models.GitHubAccessTokenRequest", - **kwargs + **kwargs: Any ) -> "_models.GitHubAccessTokenResponse": """Get GitHub Access Token. 
@@ -569,7 +569,7 @@ async def get_data_plane_access( resource_group_name: str, factory_name: str, policy: "_models.UserAccessPolicy", - **kwargs + **kwargs: Any ) -> "_models.AccessPolicyResponse": """Get Data Plane access. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py index c1f343c1d878..ad2c99d99edb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py @@ -46,7 +46,7 @@ async def get( factory_name: str, integration_runtime_name: str, node_name: str, - **kwargs + **kwargs: Any ) -> "_models.SelfHostedIntegrationRuntimeNode": """Gets a self-hosted integration runtime node. @@ -112,7 +112,7 @@ async def delete( factory_name: str, integration_runtime_name: str, node_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a self-hosted integration runtime node. @@ -176,7 +176,7 @@ async def update( integration_runtime_name: str, node_name: str, update_integration_runtime_node_request: "_models.UpdateIntegrationRuntimeNodeRequest", - **kwargs + **kwargs: Any ) -> "_models.SelfHostedIntegrationRuntimeNode": """Updates a self-hosted integration runtime node. @@ -250,7 +250,7 @@ async def get_ip_address( factory_name: str, integration_runtime_name: str, node_name: str, - **kwargs + **kwargs: Any ) -> "_models.IntegrationRuntimeNodeIpAddress": """Get the IP address of self-hosted integration runtime node. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py index a89e3d327ed9..c121228898d3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -47,7 +47,7 @@ async def _refresh_initial( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> Optional["_models.SsisObjectMetadataStatusResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.SsisObjectMetadataStatusResponse"]] error_map = { @@ -98,7 +98,7 @@ async def begin_refresh( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.SsisObjectMetadataStatusResponse"]: """Refresh a SSIS integration runtime object metadata. @@ -110,8 +110,8 @@ async def begin_refresh( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) @@ -171,7 +171,7 @@ async def get( factory_name: str, integration_runtime_name: str, get_metadata_request: Optional["_models.GetSsisObjectMetadataRequest"] = None, - **kwargs + **kwargs: Any ) -> "_models.SsisObjectMetadataListResponse": """Get a SSIS integration runtime object metadata by specified path. The return is pageable metadata list. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py index 1797baefb011..e4c75a64c58f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py @@ -47,7 +47,7 @@ def list_by_factory( self, resource_group_name: str, factory_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.IntegrationRuntimeListResponse"]: """Lists integration runtimes. @@ -124,7 +124,7 @@ async def create_or_update( integration_runtime_name: str, integration_runtime: "_models.IntegrationRuntimeResource", if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.IntegrationRuntimeResource": """Creates or updates an integration runtime. @@ -199,7 +199,7 @@ async def get( factory_name: str, integration_runtime_name: str, if_none_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> Optional["_models.IntegrationRuntimeResource"]: """Gets an integration runtime. @@ -270,7 +270,7 @@ async def update( factory_name: str, integration_runtime_name: str, update_integration_runtime_request: "_models.UpdateIntegrationRuntimeRequest", - **kwargs + **kwargs: Any ) -> "_models.IntegrationRuntimeResource": """Updates an integration runtime. @@ -339,7 +339,7 @@ async def delete( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes an integration runtime. @@ -398,7 +398,7 @@ async def get_status( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> "_models.IntegrationRuntimeStatusResponse": """Gets detailed status information for an integration runtime. @@ -455,12 +455,74 @@ async def get_status( return deserialized get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + async def outbound_network_dependencies_endpoints( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs: Any + ) -> "_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse": + """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.outbound_network_dependencies_endpoints.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore + async def get_connection_info( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> "_models.IntegrationRuntimeConnectionInfo": """Gets the on-premises integration runtime connection information for encrypting the on-premises data source credentials. @@ -524,7 +586,7 @@ async def regenerate_auth_key( factory_name: str, integration_runtime_name: str, regenerate_key_parameters: "_models.IntegrationRuntimeRegenerateKeyParameters", - **kwargs + **kwargs: Any ) -> "_models.IntegrationRuntimeAuthKeys": """Regenerates the authentication key for an integration runtime. 
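The newly added outbound_network_dependencies_endpoints operation above is a plain GET; a call sketch, with the shape of the response model assumed by analogy with the other list responses in this patch:

async def show_outbound_dependencies(client):
    result = await client.integration_runtimes.outbound_network_dependencies_endpoints(
        "<resource-group>", "<factory>", "<azure-ssis-ir-name>")
    # A `value` collection is assumed; the exact fields are not shown here.
    for item in result.value:
        print(item)
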
@@ -594,7 +656,7 @@ async def list_auth_keys( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> "_models.IntegrationRuntimeAuthKeys": """Retrieves the authentication keys for an integration runtime. @@ -656,7 +718,7 @@ async def _start_initial( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> Optional["_models.IntegrationRuntimeStatusResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.IntegrationRuntimeStatusResponse"]] error_map = { @@ -707,7 +769,7 @@ async def begin_start( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.IntegrationRuntimeStatusResponse"]: """Starts a ManagedReserved type integration runtime. @@ -719,8 +781,8 @@ async def begin_start( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) @@ -779,7 +841,7 @@ async def _stop_initial( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -825,7 +887,7 @@ async def begin_stop( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Stops a ManagedReserved type integration runtime. @@ -837,8 +899,8 @@ async def begin_stop( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -894,7 +956,7 @@ async def sync_credentials( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> None: """Force the integration runtime to synchronize credentials across integration runtime nodes, and this will override the credentials across all worker nodes with those available on the @@ -956,7 +1018,7 @@ async def get_monitoring_data( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> "_models.IntegrationRuntimeMonitoringData": """Get the integration runtime monitoring data, which includes the monitor data for all the nodes under this integration runtime. @@ -1019,7 +1081,7 @@ async def upgrade( resource_group_name: str, factory_name: str, integration_runtime_name: str, - **kwargs + **kwargs: Any ) -> None: """Upgrade self-hosted integration runtime to latest version if availability. @@ -1079,7 +1141,7 @@ async def remove_links( factory_name: str, integration_runtime_name: str, linked_integration_runtime_request: "_models.LinkedIntegrationRuntimeRequest", - **kwargs + **kwargs: Any ) -> None: """Remove all linked integration runtimes under specific data factory in a self-hosted integration runtime. @@ -1148,7 +1210,7 @@ async def create_linked_integration_runtime( factory_name: str, integration_runtime_name: str, create_linked_integration_runtime_request: "_models.CreateLinkedIntegrationRuntimeRequest", - **kwargs + **kwargs: Any ) -> "_models.IntegrationRuntimeStatusResponse": """Create a linked integration runtime entry in a shared integration runtime. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py index ebc43b4155e2..d174417dc70e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py @@ -45,7 +45,7 @@ def list_by_factory( self, resource_group_name: str, factory_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.LinkedServiceListResponse"]: """Lists linked services. @@ -122,7 +122,7 @@ async def create_or_update( linked_service_name: str, linked_service: "_models.LinkedServiceResource", if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.LinkedServiceResource": """Creates or updates a linked service. @@ -197,7 +197,7 @@ async def get( factory_name: str, linked_service_name: str, if_none_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> Optional["_models.LinkedServiceResource"]: """Gets a linked service. @@ -267,7 +267,7 @@ async def delete( resource_group_name: str, factory_name: str, linked_service_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a linked service. 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py index 7a11548e216d..944fbb103654 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py @@ -46,7 +46,7 @@ def list_by_factory( resource_group_name: str, factory_name: str, managed_virtual_network_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ManagedPrivateEndpointListResponse"]: """Lists managed private endpoints. @@ -127,7 +127,7 @@ async def create_or_update( managed_private_endpoint_name: str, managed_private_endpoint: "_models.ManagedPrivateEndpointResource", if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ManagedPrivateEndpointResource": """Creates or updates a managed private endpoint. @@ -206,7 +206,7 @@ async def get( managed_virtual_network_name: str, managed_private_endpoint_name: str, if_none_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ManagedPrivateEndpointResource": """Gets a managed private endpoint. @@ -278,7 +278,7 @@ async def delete( factory_name: str, managed_virtual_network_name: str, managed_private_endpoint_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a managed private endpoint. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py index 78249f180b75..4a760a88b537 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py @@ -45,7 +45,7 @@ def list_by_factory( self, resource_group_name: str, factory_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.ManagedVirtualNetworkListResponse"]: """Lists managed Virtual Networks. @@ -122,7 +122,7 @@ async def create_or_update( managed_virtual_network_name: str, managed_virtual_network: "_models.ManagedVirtualNetworkResource", if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ManagedVirtualNetworkResource": """Creates or updates a managed Virtual Network. @@ -197,7 +197,7 @@ async def get( factory_name: str, managed_virtual_network_name: str, if_none_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.ManagedVirtualNetworkResource": """Gets a managed Virtual Network. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py index 4db0a04c5770..a829cafd0bfa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py @@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResponse"]: """Lists the available Azure Data Factory API operations. 
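The list operations in this patch, new and old alike, return AsyncItemPaged iterators, which yield individual items and fetch pages lazily. A consumption sketch (resource-group and factory names are placeholders):

async def walk_pageables(client):
    async for op in client.operations.list():
        print(op.name)
    async for connection in client.private_end_point_connections.list_by_factory(
            "<resource-group>", "<factory>"):
        print(connection.name)
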
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py index cf4320c7e29e..a6c0ff7add0a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py @@ -45,7 +45,7 @@ async def query_by_factory( resource_group_name: str, factory_name: str, filter_parameters: "_models.RunFilterParameters", - **kwargs + **kwargs: Any ) -> "_models.PipelineRunsQueryResponse": """Query pipeline runs in the factory based on input filter conditions. @@ -111,7 +111,7 @@ async def get( resource_group_name: str, factory_name: str, run_id: str, - **kwargs + **kwargs: Any ) -> "_models.PipelineRun": """Get a pipeline run by its run ID. @@ -174,7 +174,7 @@ async def cancel( factory_name: str, run_id: str, is_recursive: Optional[bool] = None, - **kwargs + **kwargs: Any ) -> None: """Cancel a pipeline run by its run ID. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py index 0a2ecc0e072c..fb22ca9c161e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py @@ -45,7 +45,7 @@ def list_by_factory( self, resource_group_name: str, factory_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.PipelineListResponse"]: """Lists pipelines. @@ -122,7 +122,7 @@ async def create_or_update( pipeline_name: str, pipeline: "_models.PipelineResource", if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.PipelineResource": """Creates or updates a pipeline. @@ -197,7 +197,7 @@ async def get( factory_name: str, pipeline_name: str, if_none_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> Optional["_models.PipelineResource"]: """Gets a pipeline. @@ -266,7 +266,7 @@ async def delete( resource_group_name: str, factory_name: str, pipeline_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a pipeline. @@ -329,8 +329,8 @@ async def create_run( is_recovery: Optional[bool] = None, start_activity_name: Optional[str] = None, start_from_failure: Optional[bool] = None, - parameters: Optional[Dict[str, object]] = None, - **kwargs + parameters: Optional[Dict[str, Any]] = None, + **kwargs: Any ) -> "_models.CreateRunResponse": """Creates a run of a pipeline. @@ -354,7 +354,7 @@ async def create_run( :type start_from_failure: bool :param parameters: Parameters of the pipeline run. These parameters will be used only if the runId is not specified. 
- :type parameters: dict[str, object] + :type parameters: dict[str, any] :keyword callable cls: A custom type or function that will be passed the direct response :return: CreateRunResponse, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py new file mode 100644 index 000000000000..e49a3ca3e19e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py @@ -0,0 +1,116 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndPointConnectionsOperations: + """PrivateEndPointConnectionsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointConnectionListResponse"]: + """Lists Private endpoint connections. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections'} # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py new file mode 100644 index 000000000000..db02fd36b659 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py @@ -0,0 +1,245 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
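`list_by_factory` above is a plain `def` returning `AsyncItemPaged`, so callers iterate it with `async for` rather than awaiting it; the pager follows `next_link` continuations transparently. A minimal sketch, assuming the client exposes this operation group as `private_end_point_connections` (mirroring the class name) and using placeholder resource names:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient

async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # AsyncItemPaged yields the deserialized items from each
            # PrivateEndpointConnectionListResponse page in turn.
            pager = client.private_end_point_connections.list_by_factory(
                "<resource-group>", "<factory-name>"
            )
            async for connection in pager:
                print(connection.name)

asyncio.run(main())
```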
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionOperations: + """PrivateEndpointConnectionOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + private_endpoint_wrapper: "_models.PrivateLinkConnectionApprovalRequestResource", + if_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.PrivateEndpointConnectionResource": + """Approves or rejects a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. + :type private_endpoint_connection_name: str + :param private_endpoint_wrapper: + :type private_endpoint_wrapper: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource + :param if_match: ETag of the private endpoint connection entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> "_models.PrivateEndpointConnectionResource": + """Gets a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. 
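For the `create_or_update` approval operation above, a minimal sketch. The payload shape is an assumption inferred from the model names this patch adds (`PrivateLinkConnectionApprovalRequestResource` wrapping a `properties` request whose `private_link_service_connection_state` carries the decision), as is the `private_endpoint_connection` client attribute; resource names are placeholders:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (
    PrivateLinkConnectionApprovalRequest,
    PrivateLinkConnectionApprovalRequestResource,
    PrivateLinkConnectionState,
)

async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # Assumed payload shape: the wrapper resource carries the approval
            # request, whose connection state records the approve/reject decision.
            approval = PrivateLinkConnectionApprovalRequestResource(
                properties=PrivateLinkConnectionApprovalRequest(
                    private_link_service_connection_state=PrivateLinkConnectionState(
                        status="Approved",
                        description="Approved by the data platform team",
                        actions_required="",
                    )
                )
            )
            connection = await client.private_endpoint_connection.create_or_update(
                "<resource-group>", "<factory-name>",
                "<private-endpoint-connection-name>", approval,
            )
            print(connection.id)

asyncio.run(main())
```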
+ :type private_endpoint_connection_name: str + :param if_none_match: ETag of the private endpoint connection entity. Should only be specified + for get. If the ETag matches the existing entity tag, or if * was provided, then no content + will be returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> None: + """Deletes a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. 
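The `get` above supports conditional reads: pass the last observed ETag as `if_none_match` and the service skips returning an unchanged entity. Note the generated code only treats HTTP 200 as success, so "not modified" surfaces as `HttpResponseError`. A minimal sketch, assuming the returned resource exposes its ETag as `.etag` (the usual SubResource convention) and using placeholder names:

```python
import asyncio

from azure.core.exceptions import HttpResponseError
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient

async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            connection = await client.private_endpoint_connection.get(
                "<resource-group>", "<factory-name>",
                "<private-endpoint-connection-name>",
            )
            try:
                # Conditional re-read: a matching ETag (entity unchanged)
                # falls outside the accepted [200] status list and raises.
                await client.private_endpoint_connection.get(
                    "<resource-group>", "<factory-name>",
                    "<private-endpoint-connection-name>",
                    if_none_match=connection.etag,
                )
            except HttpResponseError:
                print("entity unchanged since last read")

asyncio.run(main())
```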
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py new file mode 100644 index 000000000000..17f1abad0051 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
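The new file opening here adds a single read operation; ahead of its full body below, a minimal usage sketch for the `PrivateLinkResourcesOperations.get` it defines. The `private_link_resources` client attribute and the wrapper exposing its list as `.value` are assumptions consistent with the model names in this patch; resource names are placeholders:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient

async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # Returns a PrivateLinkResourcesWrapper; its value is assumed to
            # list the private link resources the factory exposes.
            wrapper = await client.private_link_resources.get(
                "<resource-group>", "<factory-name>"
            )
            for resource in wrapper.value:
                print(resource.name)

asyncio.run(main())
```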
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations: + """PrivateLinkResourcesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + resource_group_name: str, + factory_name: str, + **kwargs: Any + ) -> "_models.PrivateLinkResourcesWrapper": + """Gets the private link resources. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourcesWrapper, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateLinkResourcesWrapper + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourcesWrapper"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourcesWrapper', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources'} # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py index c16445e23e02..c43e180febb9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py @@ -46,7 +46,7 @@ async def rerun( factory_name: str, trigger_name: str, run_id: str, - **kwargs + **kwargs: Any ) -> None: """Rerun single trigger instance by runId. @@ -109,7 +109,7 @@ async def cancel( factory_name: str, trigger_name: str, run_id: str, - **kwargs + **kwargs: Any ) -> None: """Cancel a single trigger instance by runId. @@ -171,7 +171,7 @@ async def query_by_factory( resource_group_name: str, factory_name: str, filter_parameters: "_models.RunFilterParameters", - **kwargs + **kwargs: Any ) -> "_models.TriggerRunsQueryResponse": """Query trigger runs. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py index 24512c88bbbd..1a2c6cc2a22f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py @@ -47,7 +47,7 @@ def list_by_factory( self, resource_group_name: str, factory_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.TriggerListResponse"]: """Lists triggers. @@ -122,7 +122,7 @@ async def query_by_factory( resource_group_name: str, factory_name: str, filter_parameters: "_models.TriggerFilterParameters", - **kwargs + **kwargs: Any ) -> "_models.TriggerQueryResponse": """Query triggers. @@ -190,7 +190,7 @@ async def create_or_update( trigger_name: str, trigger: "_models.TriggerResource", if_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> "_models.TriggerResource": """Creates or updates a trigger. @@ -265,7 +265,7 @@ async def get( factory_name: str, trigger_name: str, if_none_match: Optional[str] = None, - **kwargs + **kwargs: Any ) -> Optional["_models.TriggerResource"]: """Gets a trigger. @@ -334,7 +334,7 @@ async def delete( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> None: """Deletes a trigger. 
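The trigger-run hunks above only tighten `**kwargs` to `**kwargs: Any`; the call pattern for `rerun` and `cancel` is unchanged. A minimal sketch of rerunning a trigger instance by its run ID, with placeholder names:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient

async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # rerun returns None; failures surface as HttpResponseError.
            await client.trigger_runs.rerun(
                "<resource-group>", "<factory-name>", "<trigger-name>", "<run-id>"
            )

asyncio.run(main())
```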
@@ -393,7 +393,7 @@ async def _subscribe_to_events_initial( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> Optional["_models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.TriggerSubscriptionOperationStatus"]] error_map = { @@ -444,7 +444,7 @@ async def begin_subscribe_to_events( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.TriggerSubscriptionOperationStatus"]: """Subscribe event trigger to events. @@ -456,8 +456,8 @@ async def begin_subscribe_to_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) @@ -516,7 +516,7 @@ async def get_event_subscription_status( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> "_models.TriggerSubscriptionOperationStatus": """Get a trigger's event subscription status. @@ -578,7 +578,7 @@ async def _unsubscribe_from_events_initial( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> Optional["_models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.TriggerSubscriptionOperationStatus"]] error_map = { @@ -629,7 +629,7 @@ async def begin_unsubscribe_from_events( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.TriggerSubscriptionOperationStatus"]: """Unsubscribe event trigger from events. @@ -641,8 +641,8 @@ async def begin_unsubscribe_from_events( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) @@ -701,7 +701,7 @@ async def _start_initial( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -747,7 +747,7 @@ async def begin_start( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Starts a trigger. @@ -759,8 +759,8 @@ async def begin_start( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -816,7 +816,7 @@ async def _stop_initial( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -862,7 +862,7 @@ async def begin_stop( resource_group_name: str, factory_name: str, trigger_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Stops a trigger. @@ -874,8 +874,8 @@ async def begin_stop( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
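The rewritten `polling` docstrings above spell out the long-running-operation contract: `AsyncARMPolling` by default, `False` for a poller that does not poll, or a caller-supplied `AsyncPollingMethod`. A minimal sketch of starting a trigger and waiting on the poller, with placeholder names:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient

async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # begin_start returns an AsyncLROPoller; result() resumes polling
            # (AsyncARMPolling by default) until the operation completes.
            poller = await client.triggers.begin_start(
                "<resource-group>", "<factory-name>", "<trigger-name>",
                polling_interval=10,  # seconds between polls when no Retry-After header
            )
            await poller.result()

asyncio.run(main())
```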
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index e6903a32cddc..7840c171ccca 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -29,6 +29,7 @@ from ._models_py3 import AmazonS3Location from ._models_py3 import AmazonS3ReadSettings from ._models_py3 import AppendVariableActivity + from ._models_py3 import ArmIdWrapper from ._models_py3 import AvroDataset from ._models_py3 import AvroFormat from ._models_py3 import AvroSink @@ -335,6 +336,10 @@ from ._models_py3 import IntegrationRuntimeMonitoringData from ._models_py3 import IntegrationRuntimeNodeIpAddress from ._models_py3 import IntegrationRuntimeNodeMonitoringData + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpoint + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails + from ._models_py3 import IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse from ._models_py3 import IntegrationRuntimeReference from ._models_py3 import IntegrationRuntimeRegenerateKeyParameters from ._models_py3 import IntegrationRuntimeResource @@ -389,12 +394,14 @@ from ._models_py3 import MarketoLinkedService from ._models_py3 import MarketoObjectDataset from ._models_py3 import MarketoSource + from ._models_py3 import MetadataItem from ._models_py3 import MicrosoftAccessLinkedService from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset from ._models_py3 import MongoDbAtlasCollectionDataset from ._models_py3 import MongoDbAtlasLinkedService + from ._models_py3 import MongoDbAtlasSink from ._models_py3 import MongoDbAtlasSource from ._models_py3 import MongoDbCollectionDataset from ._models_py3 import MongoDbCursorMethodsProperties @@ -402,6 +409,7 @@ from ._models_py3 import MongoDbSource from ._models_py3 import MongoDbV2CollectionDataset from ._models_py3 import MongoDbV2LinkedService + from ._models_py3 import MongoDbV2Sink from ._models_py3 import MongoDbV2Source from ._models_py3 import MultiplePipelineTrigger from ._models_py3 import MySqlLinkedService @@ -474,6 +482,14 @@ from ._models_py3 import PrestoLinkedService from ._models_py3 import PrestoObjectDataset from ._models_py3 import PrestoSource + from ._models_py3 import PrivateEndpointConnectionListResponse + from ._models_py3 import PrivateEndpointConnectionResource + from ._models_py3 import PrivateLinkConnectionApprovalRequest + from ._models_py3 import PrivateLinkConnectionApprovalRequestResource + from ._models_py3 import PrivateLinkConnectionState + from ._models_py3 import PrivateLinkResource + from ._models_py3 import PrivateLinkResourceProperties + from ._models_py3 import PrivateLinkResourcesWrapper from ._models_py3 import QueryDataFlowDebugSessionsResponse from ._models_py3 import QuickBooksLinkedService from ._models_py3 import QuickBooksObjectDataset @@ -484,6 +500,7 @@ from ._models_py3 import RedshiftUnloadSettings from ._models_py3 import RelationalSource from ._models_py3 import RelationalTableDataset + from ._models_py3 import RemotePrivateEndpointConnection from ._models_py3 import 
RerunTumblingWindowTrigger from ._models_py3 import Resource from ._models_py3 import ResponsysLinkedService @@ -569,6 +586,7 @@ from ._models_py3 import SparkLinkedService from ._models_py3 import SparkObjectDataset from ._models_py3 import SparkSource + from ._models_py3 import SqlAlwaysEncryptedProperties from ._models_py3 import SqlDWSink from ._models_py3 import SqlDWSource from ._models_py3 import SqlMISink @@ -683,6 +701,7 @@ from ._models import AmazonS3Location # type: ignore from ._models import AmazonS3ReadSettings # type: ignore from ._models import AppendVariableActivity # type: ignore + from ._models import ArmIdWrapper # type: ignore from ._models import AvroDataset # type: ignore from ._models import AvroFormat # type: ignore from ._models import AvroSink # type: ignore @@ -989,6 +1008,10 @@ from ._models import IntegrationRuntimeMonitoringData # type: ignore from ._models import IntegrationRuntimeNodeIpAddress # type: ignore from ._models import IntegrationRuntimeNodeMonitoringData # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpoint # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails # type: ignore + from ._models import IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse # type: ignore from ._models import IntegrationRuntimeReference # type: ignore from ._models import IntegrationRuntimeRegenerateKeyParameters # type: ignore from ._models import IntegrationRuntimeResource # type: ignore @@ -1043,12 +1066,14 @@ from ._models import MarketoLinkedService # type: ignore from ._models import MarketoObjectDataset # type: ignore from ._models import MarketoSource # type: ignore + from ._models import MetadataItem # type: ignore from ._models import MicrosoftAccessLinkedService # type: ignore from ._models import MicrosoftAccessSink # type: ignore from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore from ._models import MongoDbAtlasCollectionDataset # type: ignore from ._models import MongoDbAtlasLinkedService # type: ignore + from ._models import MongoDbAtlasSink # type: ignore from ._models import MongoDbAtlasSource # type: ignore from ._models import MongoDbCollectionDataset # type: ignore from ._models import MongoDbCursorMethodsProperties # type: ignore @@ -1056,6 +1081,7 @@ from ._models import MongoDbSource # type: ignore from ._models import MongoDbV2CollectionDataset # type: ignore from ._models import MongoDbV2LinkedService # type: ignore + from ._models import MongoDbV2Sink # type: ignore from ._models import MongoDbV2Source # type: ignore from ._models import MultiplePipelineTrigger # type: ignore from ._models import MySqlLinkedService # type: ignore @@ -1128,6 +1154,14 @@ from ._models import PrestoLinkedService # type: ignore from ._models import PrestoObjectDataset # type: ignore from ._models import PrestoSource # type: ignore + from ._models import PrivateEndpointConnectionListResponse # type: ignore + from ._models import PrivateEndpointConnectionResource # type: ignore + from ._models import PrivateLinkConnectionApprovalRequest # type: ignore + from ._models import PrivateLinkConnectionApprovalRequestResource # type: ignore + from ._models import PrivateLinkConnectionState # type: ignore + from ._models import PrivateLinkResource # type: ignore + from ._models import PrivateLinkResourceProperties # 
type: ignore + from ._models import PrivateLinkResourcesWrapper # type: ignore from ._models import QueryDataFlowDebugSessionsResponse # type: ignore from ._models import QuickBooksLinkedService # type: ignore from ._models import QuickBooksObjectDataset # type: ignore @@ -1138,6 +1172,7 @@ from ._models import RedshiftUnloadSettings # type: ignore from ._models import RelationalSource # type: ignore from ._models import RelationalTableDataset # type: ignore + from ._models import RemotePrivateEndpointConnection # type: ignore from ._models import RerunTumblingWindowTrigger # type: ignore from ._models import Resource # type: ignore from ._models import ResponsysLinkedService # type: ignore @@ -1223,6 +1258,7 @@ from ._models import SparkLinkedService # type: ignore from ._models import SparkObjectDataset # type: ignore from ._models import SparkSource # type: ignore + from ._models import SqlAlwaysEncryptedProperties # type: ignore from ._models import SqlDWSink # type: ignore from ._models import SqlDWSource # type: ignore from ._models import SqlMISink # type: ignore @@ -1319,7 +1355,7 @@ AvroCompressionCodec, AzureFunctionActivityMethod, AzureSearchIndexWriteBehaviorType, - BlobEventTypesEnum, + BlobEventTypes, CassandraSourceReadConsistencyLevels, CompressionCodec, CopyBehaviorType, @@ -1334,7 +1370,6 @@ DependencyCondition, DynamicsAuthenticationType, DynamicsDeploymentType, - DynamicsServicePrincipalCredentialType, DynamicsSinkWriteBehavior, EventSubscriptionStatus, FactoryIdentityType, @@ -1388,10 +1423,12 @@ SapTablePartitionOption, SelfHostedIntegrationRuntimeNodeStatus, ServiceNowAuthenticationType, + ServicePrincipalCredentialType, SftpAuthenticationType, SparkAuthenticationType, SparkServerType, SparkThriftTransportProtocol, + SqlAlwaysEncryptedAkvAuthType, SqlPartitionOption, SsisLogLocationType, SsisObjectMetadataType, @@ -1432,6 +1469,7 @@ 'AmazonS3Location', 'AmazonS3ReadSettings', 'AppendVariableActivity', + 'ArmIdWrapper', 'AvroDataset', 'AvroFormat', 'AvroSink', @@ -1738,6 +1776,10 @@ 'IntegrationRuntimeMonitoringData', 'IntegrationRuntimeNodeIpAddress', 'IntegrationRuntimeNodeMonitoringData', + 'IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint', + 'IntegrationRuntimeOutboundNetworkDependenciesEndpoint', + 'IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails', + 'IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', 'IntegrationRuntimeReference', 'IntegrationRuntimeRegenerateKeyParameters', 'IntegrationRuntimeResource', @@ -1792,12 +1834,14 @@ 'MarketoLinkedService', 'MarketoObjectDataset', 'MarketoSource', + 'MetadataItem', 'MicrosoftAccessLinkedService', 'MicrosoftAccessSink', 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollectionDataset', 'MongoDbAtlasLinkedService', + 'MongoDbAtlasSink', 'MongoDbAtlasSource', 'MongoDbCollectionDataset', 'MongoDbCursorMethodsProperties', @@ -1805,6 +1849,7 @@ 'MongoDbSource', 'MongoDbV2CollectionDataset', 'MongoDbV2LinkedService', + 'MongoDbV2Sink', 'MongoDbV2Source', 'MultiplePipelineTrigger', 'MySqlLinkedService', @@ -1877,6 +1922,14 @@ 'PrestoLinkedService', 'PrestoObjectDataset', 'PrestoSource', + 'PrivateEndpointConnectionListResponse', + 'PrivateEndpointConnectionResource', + 'PrivateLinkConnectionApprovalRequest', + 'PrivateLinkConnectionApprovalRequestResource', + 'PrivateLinkConnectionState', + 'PrivateLinkResource', + 'PrivateLinkResourceProperties', + 'PrivateLinkResourcesWrapper', 'QueryDataFlowDebugSessionsResponse', 'QuickBooksLinkedService', 
'QuickBooksObjectDataset', @@ -1887,6 +1940,7 @@ 'RedshiftUnloadSettings', 'RelationalSource', 'RelationalTableDataset', + 'RemotePrivateEndpointConnection', 'RerunTumblingWindowTrigger', 'Resource', 'ResponsysLinkedService', @@ -1972,6 +2026,7 @@ 'SparkLinkedService', 'SparkObjectDataset', 'SparkSource', + 'SqlAlwaysEncryptedProperties', 'SqlDWSink', 'SqlDWSource', 'SqlMISink', @@ -2066,7 +2121,7 @@ 'AvroCompressionCodec', 'AzureFunctionActivityMethod', 'AzureSearchIndexWriteBehaviorType', - 'BlobEventTypesEnum', + 'BlobEventTypes', 'CassandraSourceReadConsistencyLevels', 'CompressionCodec', 'CopyBehaviorType', @@ -2081,7 +2136,6 @@ 'DependencyCondition', 'DynamicsAuthenticationType', 'DynamicsDeploymentType', - 'DynamicsServicePrincipalCredentialType', 'DynamicsSinkWriteBehavior', 'EventSubscriptionStatus', 'FactoryIdentityType', @@ -2135,10 +2189,12 @@ 'SapTablePartitionOption', 'SelfHostedIntegrationRuntimeNodeStatus', 'ServiceNowAuthenticationType', + 'ServicePrincipalCredentialType', 'SftpAuthenticationType', 'SparkAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', + 'SqlAlwaysEncryptedAkvAuthType', 'SqlPartitionOption', 'SsisLogLocationType', 'SsisObjectMetadataType', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 9d920189dda8..c0b274bf2d3b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -53,7 +53,7 @@ class AzureSearchIndexWriteBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, MERGE = "Merge" UPLOAD = "Upload" -class BlobEventTypesEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class BlobEventTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): MICROSOFT_STORAGE_BLOB_CREATED = "Microsoft.Storage.BlobCreated" MICROSOFT_STORAGE_BLOB_DELETED = "Microsoft.Storage.BlobDeleted" @@ -77,14 +77,16 @@ class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMe LOCAL_SERIAL = "LOCAL_SERIAL" class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available compressionCodec values. + """ NONE = "none" - GZIP = "gzip" - SNAPPY = "snappy" LZO = "lzo" BZIP2 = "bzip2" + GZIP = "gzip" DEFLATE = "deflate" ZIP_DEFLATE = "zipDeflate" + SNAPPY = "snappy" LZ4 = "lz4" TAR = "tar" TAR_G_ZIP = "tarGZip" @@ -174,9 +176,7 @@ class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): COMPLETED = "Completed" class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' - for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in - online scenario. Type: string (or Expression with resultType string). + """All available dynamicsAuthenticationType values. """ OFFICE365 = "Office365" @@ -184,23 +184,12 @@ class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, E AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The deployment type of the Dynamics instance. 'Online' for Dynamics Online and - 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. 
Type: string (or Expression with - resultType string). + """All available dynamicsDeploymentType values. """ ONLINE = "Online" ON_PREMISES_WITH_IFD = "OnPremisesWithIfd" -class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The service principal credential type to use in Server-To-Server authentication. - 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or - Expression with resultType string). - """ - - SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" - SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" - class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Defines values for DynamicsSinkWriteBehavior. """ @@ -267,7 +256,7 @@ class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum BASIC = "Basic" class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The node types on which the script action should be executed. + """All available HdiNodeTypes values. """ HEADNODE = "Headnode" @@ -352,6 +341,7 @@ class IntegrationRuntimeEntityReferenceType(with_metaclass(_CaseInsensitiveEnumM INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" LINKED_SERVICE_REFERENCE = "LinkedServiceReference" + CREDENTIAL_REFERENCE = "CredentialReference" class IntegrationRuntimeInternalChannelEncryptionMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """It is used to set the encryption mode for node-node communication channel (when more than 2 @@ -417,8 +407,7 @@ class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) ARRAY_OF_OBJECTS = "arrayOfObjects" class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """File pattern of JSON. This setting controls the way a collection of JSON objects will be - treated. The default value is 'setOfObjects'. It is case-sensitive. + """All available filePatterns. """ SET_OF_OBJECTS = "setOfObjects" @@ -661,6 +650,13 @@ class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, BASIC = "Basic" O_AUTH2 = "OAuth2" +class ServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available servicePrincipalCredentialType values. + """ + + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" + class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ @@ -694,6 +690,14 @@ class SparkThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, SASL = "SASL" HTTP = "HTTP " +class SqlAlwaysEncryptedAkvAuthType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Sql always encrypted AKV authentication type. Type: string (or Expression with resultType + string). + """ + + SERVICE_PRINCIPAL = "ServicePrincipal" + MANAGED_IDENTITY = "ManagedIdentity" + class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Sql read in parallel. 
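Because these enums subclass `str`, the `BlobEventTypesEnum` to `BlobEventTypes` rename (and the move from `DynamicsServicePrincipalCredentialType` to the shared `ServicePrincipalCredentialType`) changes only the Python-side names; members still compare equal to their wire values. A small illustration, assuming the case-insensitive metaclass upper-cases member lookups as in other generated SDKs of this vintage:

```python
from azure.mgmt.datafactory.models import (
    BlobEventTypes,
    ServicePrincipalCredentialType,
)

# str-based enums serialize to, and compare equal to, their wire values.
assert BlobEventTypes.MICROSOFT_STORAGE_BLOB_CREATED == "Microsoft.Storage.BlobCreated"
assert ServicePrincipalCredentialType.SERVICE_PRINCIPAL_CERT == "ServicePrincipalCert"

# Member lookup through the case-insensitive metaclass tolerates casing drift.
assert BlobEventTypes["microsoft_storage_blob_deleted"] is BlobEventTypes.MICROSOFT_STORAGE_BLOB_DELETED
```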
""" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index 500676c7ed11..f58ccd6d8fa4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -40,13 +40,13 @@ class Activity(msrest.serialization.Model): """A pipeline activity. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + sub-classes are: ControlActivity, ExecutionActivity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -74,7 +74,7 @@ class Activity(msrest.serialization.Model): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + 'type': {'Container': 'ControlActivity', 'Execution': 'ExecutionActivity'} } def __init__( @@ -97,7 +97,7 @@ class ActivityDependency(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param activity: Required. Activity name. :type activity: str :param dependency_conditions: Required. Match-Condition for the dependency. @@ -130,14 +130,14 @@ class ActivityPolicy(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object + :type timeout: any :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :type retry: object + :type retry: any :param retry_interval_in_seconds: Interval between each retry attempt (in seconds). The default is 30 sec. :type retry_interval_in_seconds: int @@ -182,7 +182,7 @@ class ActivityRun(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar pipeline_name: The name of the pipeline. :vartype pipeline_name: str :ivar pipeline_run_id: The id of the pipeline run. 
@@ -204,11 +204,11 @@ class ActivityRun(msrest.serialization.Model): :ivar duration_in_ms: The duration of the activity run. :vartype duration_in_ms: int :ivar input: The input for the activity. - :vartype input: object + :vartype input: any :ivar output: The output for the activity. - :vartype output: object + :vartype output: any :ivar error: The error if any from the activity run. - :vartype error: object + :vartype error: any """ _validation = { @@ -318,9 +318,9 @@ class AdditionalColumns(msrest.serialization.Model): """Specify the column name and value of additional columns. :param name: Additional column name. Type: string (or Expression with resultType string). - :type name: object + :type name: any :param value: Additional column value. Type: string (or Expression with resultType string). - :type value: object + :type value: any """ _attribute_map = { @@ -347,7 +347,7 @@ class LinkedService(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -357,7 +357,7 @@ class LinkedService(msrest.serialization.Model): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] """ _validation = { @@ -397,7 +397,7 @@ class AmazonMWSLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -407,36 +407,36 @@ class AmazonMWSLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com). - :type endpoint: object + :type endpoint: any :param marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2). - :type marketplace_id: object + :type marketplace_id: any :param seller_id: Required. The Amazon seller ID. - :type seller_id: object + :type seller_id: any :param mws_auth_token: The Amazon MWS authentication token. :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase :param access_key_id: Required. The access key id used to access data. - :type access_key_id: object + :type access_key_id: any :param secret_key: The secret key used to access data. :type secret_key: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
- :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -494,23 +494,23 @@ class Dataset(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -560,28 +560,28 @@ class AmazonMWSObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -621,18 +621,21 @@ class CopySource(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any """ _validation = { @@ -645,6 +648,7 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -661,6 +665,7 @@ def __init__( self.source_retry_count = kwargs.get('source_retry_count', None) self.source_retry_wait = kwargs.get('source_retry_wait', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class TabularSource(CopySource): @@ -673,21 +678,24 @@ class TabularSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -703,6 +711,7 @@ class TabularSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -728,27 +737,30 @@ class AmazonMWSSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -761,6 +773,7 @@ class AmazonMWSSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -782,7 +795,7 @@ class AmazonRedshiftLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -792,25 +805,25 @@ class AmazonRedshiftLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Required. The name of the Amazon Redshift server. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param username: The username of the Amazon Redshift source. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: The password of the Amazon Redshift source. :type password: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param port: The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). - :type port: object + :type port: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -855,26 +868,29 @@ class AmazonRedshiftSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. @@ -891,6 +907,7 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -914,35 +931,35 @@ class AmazonRedshiftTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The Amazon Redshift table name. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -983,7 +1000,7 @@ class AmazonS3CompatibleLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -993,10 +1010,10 @@ class AmazonS3CompatibleLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[object] + :type annotations: list[any] :param access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object + :type access_key_id: any :param secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -1004,14 +1021,14 @@ class AmazonS3CompatibleLinkedService(LinkedService): Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: object + :type service_url: any :param force_path_style: If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. Type: boolean (or Expression with resultType boolean). - :type force_path_style: object + :type force_path_style: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -1055,15 +1072,15 @@ class DatasetLocation(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -1099,21 +1116,21 @@ class AmazonS3CompatibleLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any :param version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :type version: object + :type version: any """ _validation = { @@ -1149,12 +1166,15 @@ class StoreReadSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. 
:type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any """ _validation = { @@ -1165,6 +1185,7 @@ class StoreReadSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -1179,6 +1200,7 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class AmazonS3CompatibleReadSettings(StoreReadSettings): @@ -1188,42 +1210,45 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -1234,6 +1259,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1271,44 +1297,44 @@ class AmazonS3Dataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType string). - :type key: object + :type key: any :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param version: The version for the S3 object. Type: string (or Expression with resultType string). - :type version: object + :type version: any :param modified_datetime_start: The start of S3 object's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of S3 object's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any :param format: The format of files. 
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 object. @@ -1364,7 +1390,7 @@ class AmazonS3LinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -1374,26 +1400,26 @@ class AmazonS3LinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). - :type authentication_type: object + :type authentication_type: any :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object + :type access_key_id: any :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: object + :type service_url: any :param session_token: The session token for the S3 temporary security credential. :type session_token: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -1436,21 +1462,21 @@ class AmazonS3Location(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any :param version: Specify the version of amazon S3. Type: string (or Expression with resultType string). - :type version: object + :type version: any """ _validation = { @@ -1483,42 +1509,45 @@ class AmazonS3ReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. The read setting type.Constant filled by server.
 :type type: str
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
- :type max_concurrent_connections: object
+ :type max_concurrent_connections: any
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: any
 :param recursive: If true, files under the folder path will be read recursively. Default is
 true. Type: boolean (or Expression with resultType boolean).
- :type recursive: object
+ :type recursive: any
 :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with
 resultType string).
- :type wildcard_folder_path: object
+ :type wildcard_folder_path: any
 :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with
 resultType string).
- :type wildcard_file_name: object
+ :type wildcard_file_name: any
 :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with
 resultType string).
- :type prefix: object
+ :type prefix: any
 :param file_list_path: Point to a text file that lists each file (relative path to the path
 configured in the dataset) that you want to copy. Type: string (or Expression with resultType
 string).
- :type file_list_path: object
+ :type file_list_path: any
 :param enable_partition_discovery: Indicates whether to enable partition discovery.
 :type enable_partition_discovery: bool
 :param partition_root_path: Specify the root path where partition discovery starts from. Type:
 string (or Expression with resultType string).
- :type partition_root_path: object
+ :type partition_root_path: any
 :param delete_files_after_completion: Indicates whether the source files need to be deleted
 after copy completion. Default is false. Type: boolean (or Expression with resultType
 boolean).
- :type delete_files_after_completion: object
+ :type delete_files_after_completion: any
 :param modified_datetime_start: The start of file's modified datetime. Type: string (or
 Expression with resultType string).
- :type modified_datetime_start: object
+ :type modified_datetime_start: any
 :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression
 with resultType string).
- :type modified_datetime_end: object
+ :type modified_datetime_end: any
 """

 _validation = {
@@ -1529,6 +1558,7 @@ class AmazonS3ReadSettings(StoreReadSettings):
 'additional_properties': {'key': '', 'type': '{object}'},
 'type': {'key': 'type', 'type': 'str'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'recursive': {'key': 'recursive', 'type': 'object'},
 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -1559,14 +1589,63 @@ def __init__(
 self.modified_datetime_end = kwargs.get('modified_datetime_end', None)


-class AppendVariableActivity(Activity):
+class ControlActivity(Activity):
+ """Base class for all control activities like IfCondition, ForEach, Until.
+
+ You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AppendVariableActivity, ExecutePipelineActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + } + + _subtype_map = { + 'type': {'AppendVariable': 'AppendVariableActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + } + + def __init__( + self, + **kwargs + ): + super(ControlActivity, self).__init__(**kwargs) + self.type = 'Container' # type: str + + +class AppendVariableActivity(ControlActivity): """Append value for a Variable of type Array. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -1580,7 +1659,7 @@ class AppendVariableActivity(Activity): :param variable_name: Name of the variable whose value needs to be appended to. :type variable_name: str :param value: Value to be appended. Could be a static value or Expression. - :type value: object + :type value: any """ _validation = { @@ -1609,6 +1688,31 @@ def __init__( self.value = kwargs.get('value', None) +class ArmIdWrapper(msrest.serialization.Model): + """A wrapper for an ARM resource id. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ArmIdWrapper, self).__init__(**kwargs) + self.id = None + + class AvroDataset(Dataset): """Avro dataset. @@ -1616,31 +1720,31 @@ class AvroDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the avro storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + resultType string). + :type avro_compression_codec: any :param avro_compression_level: :type avro_compression_level: int """ @@ -1662,7 +1766,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1687,13 +1791,13 @@ class DatasetStorageFormat(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any """ _validation = { @@ -1729,13 +1833,13 @@ class AvroFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
- :type deserializer: object + :type deserializer: any """ _validation = { @@ -1761,30 +1865,33 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: any """ _validation = { @@ -1799,10 +1906,11 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1817,6 +1925,7 @@ def __init__( self.sink_retry_count = kwargs.get('sink_retry_count', None) self.sink_retry_wait = kwargs.get('sink_retry_wait', None) self.max_concurrent_connections = 
kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class AvroSink(CopySink): @@ -1826,24 +1935,27 @@ class AvroSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Avro format settings. @@ -1862,6 +1974,7 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } @@ -1883,18 +1996,21 @@ class AvroSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: any
 :param store_settings: Avro store settings.
 :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -1912,6 +2028,7 @@ class AvroSource(CopySource):
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
 }
@@ -1936,7 +2053,7 @@ class FormatWriteSettings(msrest.serialization.Model):
 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. The write setting type.Constant filled by server.
 :type type: str
 """
@@ -1970,7 +2087,7 @@
 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. The write setting type.Constant filled by server.
 :type type: str
 :param record_name: Top level record name in write result, which is required in AVRO spec.
 :type record_name: str
 :param record_namespace: Record namespace in the write result.
 :type record_namespace: str
 :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to
 the specified count. Type: integer (or Expression with resultType integer).
- :type max_rows_per_file: object
+ :type max_rows_per_file: any
 :param file_name_prefix: Specifies the file name pattern
 :code:`<fileNamePrefix>`_:code:`<fileIndex>`.:code:`<fileExtension>` when copy from non-file
 based store without partitionOptions. Type: string (or Expression with resultType string).
- :type file_name_prefix: object
+ :type file_name_prefix: any
 """

 _validation = {
@@ -2080,7 +2197,7 @@ class AzureBatchLinkedService(LinkedService):
 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Type of linked service.Constant filled by server.
 :type type: str
 :param connect_via: The integration runtime reference.
@@ -2090,24 +2207,24 @@ class AzureBatchLinkedService(LinkedService):
 :param parameters: Parameters for linked service.
 :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
- :type annotations: list[object]
+ :type annotations: list[any]
 :param account_name: Required. The Azure Batch account name. Type: string (or Expression with
 resultType string).
- :type account_name: object
+ :type account_name: any
 :param access_key: The Azure Batch account access key.
 :type access_key: ~azure.mgmt.datafactory.models.SecretBase
 :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType
 string).
- :type batch_uri: object
+ :type batch_uri: any
 :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with
 resultType string).
- :type pool_name: object + :type pool_name: any :param linked_service_name: Required. The Azure Storage linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -2154,41 +2271,41 @@ class AzureBlobDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param table_root_location: The root of blob path. Type: string (or Expression with resultType string). - :type table_root_location: object + :type table_root_location: any :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any :param format: The format of the Azure Blob storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. @@ -2241,32 +2358,32 @@ class AzureBlobFSDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param format: The format of the Azure Data Lake Storage Gen2 storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. @@ -2313,7 +2430,7 @@ class AzureBlobFSLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -2323,30 +2440,30 @@ class AzureBlobFSLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :type account_key: object + :type account_key: any :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -2392,18 +2509,18 @@ class AzureBlobFSLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). - :type file_system: object + :type file_system: any """ _validation = { @@ -2434,39 +2551,42 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -2477,6 +2597,7 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2512,26 +2633,32 @@ class AzureBlobFSSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -2546,7 +2673,9 @@ class AzureBlobFSSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -2556,6 +2685,7 @@ def __init__( super(AzureBlobFSSink, self).__init__(**kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) + self.metadata = kwargs.get('metadata', None) class AzureBlobFSSource(CopySource): @@ -2565,27 +2695,30 @@ class AzureBlobFSSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: object + :type treat_empty_as_null: any :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object + :type skip_header_line_count: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any """ _validation = { @@ -2598,6 +2731,7 @@ class AzureBlobFSSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -2624,14 +2758,17 @@ class StoreWriteSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. 
:type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any """ _validation = { @@ -2642,6 +2779,7 @@ class StoreWriteSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -2657,6 +2795,7 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) self.copy_behavior = kwargs.get('copy_behavior', None) @@ -2667,17 +2806,20 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - :type block_size_in_mb: object + :type block_size_in_mb: any """ _validation = { @@ -2688,6 +2830,7 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -2708,7 +2851,7 @@ class AzureBlobStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -2718,16 +2861,16 @@ class AzureBlobStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param account_key: The Azure key vault secret reference of accountKey in connection string. :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object + :type sas_uri: any :param sas_token: The Azure key vault secret reference of sasToken in sas uri. :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is @@ -2735,17 +2878,17 @@ class AzureBlobStorageLinkedService(LinkedService): :type service_endpoint: str :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param account_kind: Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). @@ -2806,18 +2949,18 @@ class AzureBlobStorageLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param container: Specify the container of azure blob. Type: string (or Expression with resultType string). - :type container: object + :type container: any """ _validation = { @@ -2848,42 +2991,45 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -2894,6 +3040,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2931,17 +3078,20 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. 
:type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - :type block_size_in_mb: object + :type block_size_in_mb: any """ _validation = { @@ -2952,6 +3102,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -2972,31 +3123,31 @@ class AzureDatabricksDeltaLakeDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table: The name of delta table. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param database: The database name of delta table. Type: string (or Expression with resultType string). - :type database: object + :type database: any """ _validation = { @@ -3038,7 +3189,7 @@ class ExportSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The export setting type.Constant filled by server. :type type: str """ @@ -3072,15 +3223,15 @@ class AzureDatabricksDeltaLakeExportCommand(ExportSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The export setting type.Constant filled by server. :type type: str :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type date_format: object + :type date_format: any :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type timestamp_format: object + :type timestamp_format: any """ _validation = { @@ -3114,7 +3265,7 @@ class ImportSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The import setting type.Constant filled by server. :type type: str """ @@ -3148,15 +3299,15 @@ class AzureDatabricksDeltaLakeImportCommand(ImportSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The import setting type.Constant filled by server. :type type: str :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type date_format: object + :type date_format: any :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type timestamp_format: object + :type timestamp_format: any """ _validation = { @@ -3187,7 +3338,7 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -3197,21 +3348,21 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). - :type domain: object + :type domain: any :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param cluster_id: The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). - :type cluster_id: object + :type cluster_id: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -3251,27 +3402,30 @@ class AzureDatabricksDeltaLakeSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param import_settings: Azure Databricks Delta Lake import settings. :type import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand """ @@ -3288,6 +3442,7 @@ class AzureDatabricksDeltaLakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, } @@ -3309,21 +3464,24 @@ class AzureDatabricksDeltaLakeSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param export_settings: Azure Databricks Delta Lake export settings. :type export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand """ @@ -3338,6 +3496,7 @@ class AzureDatabricksDeltaLakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, } @@ -3359,7 +3518,7 @@ class AzureDatabricksLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -3369,72 +3528,72 @@ class AzureDatabricksLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). - :type domain: object + :type domain: any :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param authentication: Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :type authentication: object + :type authentication: any :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). - :type workspace_resource_id: object + :type workspace_resource_id: any :param existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object + :type existing_cluster_id: any :param instance_pool_id: The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object + :type instance_pool_id: any :param new_cluster_version: If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). 
- :type new_cluster_version: object + :type new_cluster_version: any :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + clusters, this is a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means + auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and + can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object + :type new_cluster_num_of_worker: any :param new_cluster_node_type: The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object + :type new_cluster_node_type: any :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value pairs. - :type new_cluster_spark_conf: dict[str, object] + :type new_cluster_spark_conf: dict[str, any] :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] + :type new_cluster_spark_env_vars: dict[str, any] :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] + :type new_cluster_custom_tags: dict[str, any] :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). - :type new_cluster_log_destination: object + :type new_cluster_log_destination: any :param new_cluster_driver_node_type: The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). - :type new_cluster_driver_node_type: object + :type new_cluster_driver_node_type: any :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object + :type new_cluster_init_scripts: any :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object + :type new_cluster_enable_elastic_disk: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string).
- :type policy_id: object + :type policy_id: any """ _validation = { @@ -3505,7 +3664,7 @@ class ExecutionActivity(Activity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -3559,7 +3718,7 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -3576,10 +3735,10 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param command: Required. A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). - :type command: object + :type command: any :param command_timeout: Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :type command_timeout: any """ _validation = { @@ -3618,7 +3777,7 @@ class AzureDataExplorerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -3628,23 +3787,23 @@ class AzureDataExplorerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format https://:code:``.:code:``.kusto.windows.net. Type: string (or Expression with resultType string). - :type endpoint: object + :type endpoint: any :param service_principal_id: The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Kusto. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any """ _validation = { @@ -3687,33 +3846,36 @@ class AzureDataExplorerSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. 
Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. - :type ingestion_mapping_name: object + :type ingestion_mapping_name: any :param ingestion_mapping_as_json: An explicit column mapping description provided in a json format. Type: string. - :type ingestion_mapping_as_json: object + :type ingestion_mapping_as_json: any :param flush_immediately: If set to true, any aggregation will be skipped. Default is false. Type: boolean. - :type flush_immediately: object + :type flush_immediately: any """ _validation = { @@ -3728,6 +3890,7 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, @@ -3751,27 +3914,30 @@ class AzureDataExplorerSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param no_truncation: The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. - :type no_truncation: object + :type no_truncation: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -3788,6 +3954,7 @@ class AzureDataExplorerSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, @@ -3813,29 +3980,29 @@ class AzureDataExplorerTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table: The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -3872,7 +4039,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
@@ -3882,32 +4049,32 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). - :type account_name: object + :type account_name: any :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Analytics account. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Required. The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param subscription_id: Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :type subscription_id: object + :type subscription_id: any :param resource_group_name: Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object + :type resource_group_name: any :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with resultType string). - :type data_lake_analytics_uri: object + :type data_lake_analytics_uri: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -3956,32 +4123,32 @@ class AzureDataLakeStoreDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param format: The format of the Data Lake Store. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the item(s) in the Azure Data Lake @@ -4029,7 +4196,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -4039,36 +4206,36 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression with resultType string). - :type data_lake_store_uri: object + :type data_lake_store_uri: any :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Store account. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param account_name: Data Lake Store account name. Type: string (or Expression with resultType string). - :type account_name: object + :type account_name: any :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). - :type subscription_id: object + :type subscription_id: any :param resource_group_name: Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object + :type resource_group_name: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -4118,15 +4285,15 @@ class AzureDataLakeStoreLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -4155,47 +4322,50 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param list_after: Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :type list_after: object + :type list_after: any :param list_before: Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). - :type list_before: object + :type list_before: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
- :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -4206,6 +4376,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4245,28 +4416,31 @@ class AzureDataLakeStoreSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param enable_adls_single_file_parallel: Single File Parallel. 
- :type enable_adls_single_file_parallel: object + :type enable_adls_single_file_parallel: any """ _validation = { @@ -4281,6 +4455,7 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } @@ -4302,21 +4477,24 @@ class AzureDataLakeStoreSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any """ _validation = { @@ -4329,6 +4507,7 @@ class AzureDataLakeStoreSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } @@ -4348,18 +4527,21 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param expiry_date_time: Specifies the expiry time of the written files. 
The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: integer (or Expression with resultType integer). - :type expiry_date_time: object + :type expiry_date_time: any """ _validation = { @@ -4370,6 +4552,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } @@ -4390,7 +4573,7 @@ class AzureFileStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -4400,34 +4583,34 @@ class AzureFileStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Host name of the server. Type: string (or Expression with resultType string). - :type host: object + :type host: any :param user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :type user_id: object + :type user_id: any :param password: Password to logon the server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param account_key: The Azure key vault secret reference of accountKey in connection string. :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object + :type sas_uri: any :param sas_token: The Azure key vault secret reference of sasToken in sas uri. :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param file_share: The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). - :type file_share: object + :type file_share: any :param snapshot: The azure file share snapshot version. Type: string (or Expression with resultType string). - :type snapshot: object + :type snapshot: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -4478,15 +4661,15 @@ class AzureFileStorageLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -4515,42 +4698,45 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -4561,6 +4747,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4598,14 +4785,17 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any """ _validation = { @@ -4616,6 +4806,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -4634,7 +4825,7 @@ class AzureFunctionActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -4654,14 +4845,14 @@ class AzureFunctionActivity(ExecutionActivity): :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod :param function_name: Required. Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string). - :type function_name: object + :type function_name: any :param headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: object + :type headers: any :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: object + :type body: any """ _validation = { @@ -4705,7 +4896,7 @@ class AzureFunctionLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -4715,16 +4906,16 @@ class AzureFunctionLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the format https://:code:`<accountName>`.azurewebsites.net. - :type function_app_url: object + :type function_app_url: any :param function_key: Function or Host key for Azure Function App. :type function_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -4762,7 +4953,7 @@ class AzureKeyVaultLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -4772,10 +4963,10 @@ class AzureKeyVaultLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param base_url: Required. The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :type base_url: object + :type base_url: any """ _validation = { @@ -4845,10 +5036,10 @@ class AzureKeyVaultSecretReference(SecretBase): :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). - :type secret_name: object + :type secret_name: any :param secret_version: The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). - :type secret_version: object + :type secret_version: any """ _validation = { @@ -4882,7 +5073,7 @@ class AzureMariaDBLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -4892,16 +5083,16 @@ class AzureMariaDBLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service.
- :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -4938,27 +5129,30 @@ class AzureMariaDBSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -4971,6 +5165,7 @@ class AzureMariaDBSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -4992,28 +5187,28 @@ class AzureMariaDBTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -5050,7 +5245,7 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -5069,7 +5264,7 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. - :type global_parameters: dict[str, object] + :type global_parameters: dict[str, any] :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution @@ -5119,7 +5314,7 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -5136,35 +5331,35 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). - :type ml_pipeline_id: object + :type ml_pipeline_id: any :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :type ml_pipeline_endpoint_id: object + :type ml_pipeline_endpoint_id: any :param version: Version of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :type version: object + :type version: any :param experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). 
- :type experiment_name: object + :type experiment_name: any :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :type ml_pipeline_parameters: object + :type ml_pipeline_parameters: any :param data_path_assignments: Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :type data_path_assignments: object + :type data_path_assignments: any :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). - :type ml_parent_run_id: object + :type ml_parent_run_id: any :param continue_on_step_failure: Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). - :type continue_on_step_failure: object + :type continue_on_step_failure: any """ _validation = { @@ -5214,7 +5409,7 @@ class AzureMLLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -5224,29 +5419,29 @@ class AzureMLLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object + :type ml_endpoint: any :param api_key: Required. The API key for accessing the Azure ML model endpoint. :type api_key: ~azure.mgmt.datafactory.models.SecretBase :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). - :type update_resource_endpoint: object + :type update_resource_endpoint: any :param service_principal_id: The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
- :type tenant: object + :type tenant: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -5293,7 +5488,7 @@ class AzureMLServiceLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -5303,30 +5498,30 @@ class AzureMLServiceLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). - :type subscription_id: object + :type subscription_id: any :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). - :type resource_group_name: object + :type resource_group_name: any :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or Expression with resultType string). - :type ml_workspace_name: object + :type ml_workspace_name: any :param service_principal_id: The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -5374,7 +5569,7 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -5391,14 +5586,14 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param trained_model_name: Required. Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). - :type trained_model_name: object + :type trained_model_name: any :param trained_model_linked_service_name: Required. 
Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. :type trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). - :type trained_model_file_path: object + :type trained_model_file_path: any """ _validation = { @@ -5441,7 +5636,7 @@ class AzureMLWebServiceFile(msrest.serialization.Model): :param file_path: Required. The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :type file_path: object + :type file_path: any :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference @@ -5473,7 +5668,7 @@ class AzureMySqlLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -5483,16 +5678,16 @@ class AzureMySqlLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -5530,27 +5725,30 @@ class AzureMySqlSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -5565,6 +5763,7 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -5584,26 +5783,29 @@ class AzureMySqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -5616,6 +5818,7 @@ class AzureMySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -5637,32 +5840,32 @@ class AzureMySqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any :param table: The name of Azure MySQL database table. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -5701,7 +5904,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -5711,16 +5914,16 @@ class AzurePostgreSqlLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -5757,27 +5960,30 @@ class AzurePostgreSqlSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -5792,6 +5998,7 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -5811,27 +6018,30 @@ class AzurePostgreSqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -5844,6 +6054,7 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -5865,35 +6076,35 @@ class AzurePostgreSqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -5934,24 +6145,27 @@ class AzureQueueSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any """ _validation = { @@ -5966,6 +6180,7 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } def __init__( @@ -5983,29 +6198,29 @@ class AzureSearchIndexDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression with resultType string). - :type index_name: object + :type index_name: any """ _validation = { @@ -6043,24 +6258,27 @@ class AzureSearchIndexSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". :type write_behavior: str or ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType @@ -6078,6 +6296,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -6097,7 +6316,7 @@ class AzureSearchLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -6107,16 +6326,16 @@ class AzureSearchLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param key: Admin Key for Azure Search service. :type key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -6154,7 +6373,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -6164,29 +6383,31 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Database. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -6208,6 +6429,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( @@ -6223,6 +6445,7 @@ def __init__( self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) class AzureSqlDWLinkedService(LinkedService): @@ -6232,7 +6455,7 @@ class AzureSqlDWLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
@@ -6242,29 +6465,29 @@ class AzureSqlDWLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -6310,35 +6533,35 @@ class AzureSqlDWTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. 
- :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -6379,7 +6602,7 @@ class AzureSqlMILinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -6389,29 +6612,31 @@ class AzureSqlMILinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Managed Instance. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any + :param always_encrypted_settings: Sql always encrypted properties. 
+ :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -6433,6 +6658,7 @@ class AzureSqlMILinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( @@ -6448,6 +6674,7 @@ def __init__( self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) class AzureSqlMITableDataset(Dataset): @@ -6457,35 +6684,35 @@ class AzureSqlMITableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -6526,42 +6753,45 @@ class AzureSqlSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
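AzureSqlMITableDataset above retires table_name in favor of the schema/table pair. A sketch, assuming an existing linked service named "AzureSqlMILinkedServiceRef" (placeholder):

from azure.mgmt.datafactory.models import (
    AzureSqlMITableDataset,
    LinkedServiceReference,
)

ds = AzureSqlMITableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="AzureSqlMILinkedServiceRef",  # placeholder name
    ),
    schema_type_properties_schema="dbo",  # flattened under typeProperties on the wire
    table="Orders",
)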
- :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object + :type sql_writer_stored_procedure_name: any :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type sql_writer_table_type: object + :type sql_writer_table_type: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object + :type stored_procedure_table_type_parameter_name: any :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object + :type table_option: any """ _validation = { @@ -6576,6 +6806,7 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -6605,39 +6836,42 @@ class AzureSqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
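AzureSqlSink above supports either a direct table write or a stored-procedure write. A sketch of the stored-procedure form, using the parameter shape documented in the hunk:

from azure.mgmt.datafactory.models import AzureSqlSink, StoredProcedureParameter

sink = AzureSqlSink(
    sql_writer_stored_procedure_name="spOverwriteOrders",   # placeholder names
    sql_writer_table_type="OrdersType",
    stored_procedure_table_type_parameter_name="Orders",
    stored_procedure_parameters={
        "identifier": StoredProcedureParameter(value="1", type="Int"),
    },
    disable_metrics_collection=False,
)
# For plain table writes, table_option="autoCreate" is the only supported value,
# per the docstring above.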
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object + :type produce_additional_types: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Sql source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ @@ -6652,6 +6886,7 @@ class AzureSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -6683,35 +6918,35 @@ class AzureSqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. 
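AzureSqlSource above gains disable_metrics_collection and documents the partition_option/partition_settings pair for parallel reads. A sketch of a dynamic-range read; the SqlPartitionSettings field names are assumptions (the model is referenced, not shown, in this hunk):

from azure.mgmt.datafactory.models import AzureSqlSource, SqlPartitionSettings

source = AzureSqlSource(
    sql_reader_query="SELECT * FROM dbo.Orders",
    query_timeout="02:00:00",  # matches the timespan pattern in the docstring
    partition_option="DynamicRange",
    partition_settings=SqlPartitionSettings(
        partition_column_name="OrderId",  # assumed field names
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)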
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the Azure SQL database. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -6752,7 +6987,7 @@ class AzureStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -6762,15 +6997,15 @@ class AzureStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param account_key: The Azure key vault secret reference of accountKey in connection string. :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object + :type sas_uri: any :param sas_token: The Azure key vault secret reference of sasToken in sas uri. :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -6817,29 +7052,29 @@ class AzureTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
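AzureStorageLinkedService above makes connection_string and sas_uri mutually exclusive. A sketch of the connection-string form:

from azure.mgmt.datafactory.models import AzureStorageLinkedService, SecureString

# Set exactly one of connection_string / sas_uri; both accept a string,
# SecureString or AzureKeyVaultSecretReference, per the docstring.
ls = AzureStorageLinkedService(
    connection_string=SecureString(
        value="DefaultEndpointsProtocol=https;AccountName=myacct;AccountKey=<key>"
    ),
)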
- :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. The table name of the Azure Table storage. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -6877,36 +7112,39 @@ class AzureTableSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). - :type azure_table_default_partition_key_value: object + :type azure_table_default_partition_key_value: any :param azure_table_partition_key_name: Azure Table partition key name. Type: string (or Expression with resultType string). - :type azure_table_partition_key_name: object + :type azure_table_partition_key_name: any :param azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with resultType string). - :type azure_table_row_key_name: object + :type azure_table_row_key_name: any :param azure_table_insert_type: Azure Table insert type. Type: string (or Expression with resultType string). 
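AzureTableSink above addresses rows by partition and row key taken from the incoming data. A sketch; the 'merge'/'replace' insert-type values are an assumption from the service documentation, not from this hunk:

from azure.mgmt.datafactory.models import AzureTableSink

sink = AzureTableSink(
    azure_table_partition_key_name="Region",   # placeholder column names
    azure_table_row_key_name="OrderId",
    azure_table_insert_type="merge",           # assumed value; 'replace' is the alternative
    write_batch_size=100,
    disable_metrics_collection=True,
)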
- :type azure_table_insert_type: object + :type azure_table_insert_type: any """ _validation = { @@ -6921,6 +7159,7 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, @@ -6946,30 +7185,33 @@ class AzureTableSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). - :type azure_table_source_query: object + :type azure_table_source_query: any :param azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). 
- :type azure_table_source_ignore_table_not_found: object + :type azure_table_source_ignore_table_not_found: any """ _validation = { @@ -6982,6 +7224,7 @@ class AzureTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, @@ -7005,7 +7248,7 @@ class AzureTableStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -7015,15 +7258,15 @@ class AzureTableStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param account_key: The Azure key vault secret reference of accountKey in connection string. :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object + :type sas_uri: any :param sas_token: The Azure key vault secret reference of sasToken in sas uri. :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -7070,23 +7313,23 @@ class BinaryDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -7135,7 +7378,7 @@ class FormatReadSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str """ @@ -7169,7 +7412,7 @@ class BinaryReadSettings(FormatReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. @@ -7202,24 +7445,27 @@ class BinarySink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Binary store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ @@ -7236,6 +7482,7 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -7255,18 +7502,21 @@ class BinarySource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
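BinarySink and BinarySource above delegate location details to polymorphic store settings and compression to format_settings. A sketch; ZipDeflateReadSettings and its field are assumptions (the compression models are defined elsewhere in this module):

from azure.mgmt.datafactory.models import (
    BinaryReadSettings,
    BinarySource,
    ZipDeflateReadSettings,  # assumed CompressionReadSettings subclass
)

source = BinarySource(
    format_settings=BinaryReadSettings(
        compression_properties=ZipDeflateReadSettings(
            preserve_zip_file_name_as_folder=False,  # assumed field name
        )
    ),
    disable_metrics_collection=True,
)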
- :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Binary store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Binary format settings. @@ -7283,6 +7533,7 @@ class BinarySource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } @@ -7309,7 +7560,7 @@ class Trigger(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -7318,7 +7569,7 @@ class Trigger(msrest.serialization.Model): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] """ _validation = { @@ -7362,7 +7613,7 @@ class MultiplePipelineTrigger(Trigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -7371,7 +7622,7 @@ class MultiplePipelineTrigger(Trigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] """ @@ -7412,7 +7663,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. 
@@ -7421,7 +7672,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to @@ -7436,7 +7687,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. :type ignore_empty_blobs: bool :param events: Required. The type of events that cause this trigger to fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypesEnum] + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] :param scope: Required. The ARM resource ID of the Storage Account. :type scope: str """ @@ -7482,35 +7733,41 @@ class BlobSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). - :type blob_writer_overwrite_files: object + :type blob_writer_overwrite_files: any :param blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression with resultType string). - :type blob_writer_date_time_format: object + :type blob_writer_date_time_format: any :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). - :type blob_writer_add_header: object + :type blob_writer_add_header: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
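The BlobEventsTrigger hunk above also retypes events from BlobEventTypesEnum to BlobEventTypes. A sketch using the Event Grid event names the enum documents:

from azure.mgmt.datafactory.models import (
    BlobEventsTrigger,
    PipelineReference,
    TriggerPipelineReference,
)

trigger = BlobEventsTrigger(
    events=["Microsoft.Storage.BlobCreated"],  # BlobEventTypes value as a string
    scope="/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<account>",
    blob_path_begins_with="/input/blobs/",
    ignore_empty_blobs=True,
    pipelines=[
        TriggerPipelineReference(
            pipeline_reference=PipelineReference(
                type="PipelineReference",
                reference_name="CopyOnNewBlob",  # placeholder pipeline name
            )
        )
    ],
)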
+ :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -7525,10 +7782,12 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -7541,6 +7800,7 @@ def __init__( self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) self.copy_behavior = kwargs.get('copy_behavior', None) + self.metadata = kwargs.get('metadata', None) class BlobSource(CopySource): @@ -7550,27 +7810,30 @@ class BlobSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: object + :type treat_empty_as_null: any :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object + :type skip_header_line_count: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
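The metadata list added to BlobSink above lets the copy activity stamp custom metadata onto written blobs. A sketch; MetadataItem's name/value fields are assumptions (the model is defined elsewhere in this patch):

from azure.mgmt.datafactory.models import BlobSink, MetadataItem

sink = BlobSink(
    copy_behavior="PreserveHierarchy",
    metadata=[
        MetadataItem(name="sourceSystem", value="sap"),  # assumed field names
    ],
)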
- :type recursive: object + :type recursive: any """ _validation = { @@ -7583,6 +7846,7 @@ class BlobSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -7608,7 +7872,7 @@ class BlobTrigger(MultiplePipelineTrigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -7617,7 +7881,7 @@ class BlobTrigger(MultiplePipelineTrigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param folder_path: Required. The path of the container/folder that will trigger the pipeline. @@ -7667,7 +7931,7 @@ class CassandraLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -7677,25 +7941,25 @@ class CassandraLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. Host name for connection. Type: string (or Expression with resultType string). - :type host: object + :type host: any :param authentication_type: AuthenticationType to be used for connection. Type: string (or Expression with resultType string). - :type authentication_type: object + :type authentication_type: any :param port: The port for the connection. Type: integer (or Expression with resultType integer). - :type port: object + :type port: any :param username: Username for authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -7739,27 +8003,30 @@ class CassandraSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param consistency_level: The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of @@ -7780,6 +8047,7 @@ class CassandraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -7803,32 +8071,32 @@ class CassandraTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with resultType string). - :type keyspace: object + :type keyspace: any """ _validation = { @@ -7869,7 +8137,7 @@ class ChainingTrigger(Trigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -7878,7 +8146,7 @@ class ChainingTrigger(Trigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines complete successfully. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference @@ -7965,9 +8233,9 @@ class CmdkeySetup(CustomSetupBase): :param type: Required. The type of custom setup.Constant filled by server. :type type: str :param target_name: Required. The server name of data source access. - :type target_name: object + :type target_name: any :param user_name: Required. The user name of data source access. - :type user_name: object + :type user_name: any :param password: Required. The password of data source access. :type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -8024,29 +8292,29 @@ class CommonDataServiceForAppsEntityDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: object + :type entity_name: any """ _validation = { @@ -8083,7 +8351,7 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -8093,49 +8361,46 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType + string). + :type deployment_type: any :param host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: object + :type host_name: any :param port: The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object + :type service_uri: any :param organization_name: The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object + :type organization_name: any :param authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + Expression with resultType string). 
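This hunk loosens deployment_type, authentication_type and service_principal_credential_type from enum-typed strings to any, so plain literals and ADF expressions both pass. A sketch of the online, service-principal form using the values named in the docstrings:

from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsLinkedService,
    SecureString,
)

ls = CommonDataServiceForAppsLinkedService(
    deployment_type="Online",
    service_uri="https://contoso.crm.dynamics.com",  # placeholder URL
    authentication_type="AADServicePrincipal",
    service_principal_id="<app-id>",
    service_principal_credential_type="ServicePrincipalKey",
    service_principal_credential=SecureString(value="<secret>"),
)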
+ :type authentication_type: any :param username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password to access the Common Data Service for Apps instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: any :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -8145,7 +8410,7 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -8161,16 +8426,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -8202,34 +8467,37 @@ class CommonDataServiceForAppsSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object + :type ignore_null_values: any :param alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: object + :type alternate_key_name: any """ _validation = { @@ -8245,6 +8513,7 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -8268,21 +8537,24 @@ class CommonDataServiceForAppsSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -8298,6 +8570,7 @@ class CommonDataServiceForAppsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -8356,7 +8629,7 @@ class CompressionReadSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The Compression setting type.Constant filled by server. :type type: str """ @@ -8390,7 +8663,7 @@ class ConcurLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -8400,31 +8673,31 @@ class ConcurLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param client_id: Required. Application client_id supplied by Concur App Management. - :type client_id: object + :type client_id: any :param username: Required. The user name that you use to access Concur Service. - :type username: object + :type username: any :param password: The password corresponding to the user name that you provided in the username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -8473,28 +8746,28 @@ class ConcurObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -8531,27 +8804,30 @@ class ConcurSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -8564,6 +8840,7 @@ class ConcurSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -8613,48 +8890,6 @@ def __init__( self.status = None -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - } - - def __init__( - self, - **kwargs - ): - super(ControlActivity, self).__init__(**kwargs) - self.type = 'Container' # type: str - - class CopyActivity(ExecutionActivity): """Copy activity. @@ -8662,7 +8897,7 @@ class CopyActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -8686,22 +8921,22 @@ class CopyActivity(ExecutionActivity): :param sink: Required. Copy activity sink. :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: object + :type translator: any :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_staging: object + :type enable_staging: any :param staging_settings: Specifies interim staging settings when EnableStaging is true. 
:type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings
:param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to
avoid overloading the data store. Type: integer (or Expression with resultType integer),
minimum: 0.
- :type parallel_copies: object
+ :type parallel_copies: any
:param data_integration_units: Maximum number of data integration units that can be used to
perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0.
- :type data_integration_units: object
+ :type data_integration_units: any
:param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false.
Type: boolean (or Expression with resultType boolean).
- :type enable_skip_incompatible_row: object
+ :type enable_skip_incompatible_row: any
:param redirect_incompatible_row_settings: Redirect incompatible row settings when
EnableSkipIncompatibleRow is true.
:type redirect_incompatible_row_settings:
@@ -8712,12 +8947,12 @@ class CopyActivity(ExecutionActivity):
:param log_settings: Log settings the customer needs to provide when enabling logging.
:type log_settings: ~azure.mgmt.datafactory.models.LogSettings
:param preserve_rules: Preserve Rules.
- :type preserve_rules: list[object]
+ :type preserve_rules: list[any]
:param preserve: Preserve rules.
- :type preserve: list[object]
+ :type preserve: list[any]
:param validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean
(or Expression with resultType boolean).
- :type validate_data_consistency: object
+ :type validate_data_consistency: any
:param skip_error_file: Specify the fault tolerance for data consistency.
:type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile
"""

_validation = {
@@ -8787,10 +9022,10 @@ class CopyActivityLogSettings(msrest.serialization.Model):

:param log_level: Gets or sets the log level; supported values: Info, Warning. Type: string (or
Expression with resultType string).
- :type log_level: object
+ :type log_level: any
:param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean
(or Expression with resultType boolean).
- :type enable_reliable_logging: object
+ :type enable_reliable_logging: any
"""

_attribute_map = {
@@ -8817,7 +9052,7 @@ class CopyTranslator(msrest.serialization.Model):

:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
:param type: Required. Copy translator type.Constant filled by server.
:type type: str
"""
@@ -8851,7 +9086,7 @@ class CosmosDbLinkedService(LinkedService):

:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
:param type: Required. Type of linked service.Constant filled by server.
:type type: str
:param connect_via: The integration runtime reference.
@@ -8861,21 +9096,21 @@ class CosmosDbLinkedService(LinkedService):
:param parameters: Parameters for linked service.
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the linked service.
- :type annotations: list[object]
+ :type annotations: list[any]
:param connection_string: The connection string. Type: string, SecureString or
AzureKeyVaultSecretReference.
- :type connection_string: object
+ :type connection_string: any
:param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or
Expression with resultType string).
- :type account_endpoint: object
+ :type account_endpoint: any
:param database: The name of the database. Type: string (or Expression with resultType
string).
- :type database: object
+ :type database: any
:param account_key: The account key of the Azure CosmosDB account. Type: SecureString or
AzureKeyVaultSecretReference.
:type account_key: ~azure.mgmt.datafactory.models.SecretBase
:param service_principal_id: The client ID of the application in Azure Active Directory used
for Server-To-Server authentication. Type: string (or Expression with resultType string).
- :type service_principal_id: object
+ :type service_principal_id: any
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
for certificate. Type: string (or Expression with resultType string). Possible values include:
@@ -8890,18 +9125,18 @@ class CosmosDbLinkedService(LinkedService):
:type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase
:param tenant: The name or ID of the tenant to which the service principal belongs. Type:
string (or Expression with resultType string).
- :type tenant: object
+ :type tenant: any
:param azure_cloud_type: Indicates the Azure cloud type of the service principal auth. Allowed
values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data
factory regions’ cloud type. Type: string (or Expression with resultType string).
- :type azure_cloud_type: object
+ :type azure_cloud_type: any
:param connection_mode: The connection mode used to access CosmosDB account. Type: string (or
Expression with resultType string). Possible values include: "Gateway", "Direct".
:type connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode
:param encrypted_credential: The encrypted credential used for authentication. Credentials are
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
- :type encrypted_credential: object
+ :type encrypted_credential: any
"""

_validation = {
@@ -8954,29 +9189,29 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset):

:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
:param type: Required. Type of dataset.Constant filled by server.
:type type: str
:param description: Dataset description.
:type description: str
:param structure: Columns that define the structure of the dataset. Type: array (or Expression
with resultType array), itemType: DatasetDataElement.
- :type structure: object
+ :type structure: any
:param schema: Columns that define the physical type schema of the dataset. Type: array (or
Expression with resultType array), itemType: DatasetSchemaDataElement.
- :type schema: object
+ :type schema: any
:param linked_service_name: Required. Linked service reference.
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
:param parameters: Parameters for dataset.
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the Dataset.
- :type annotations: list[object]
+ :type annotations: list[any]
:param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
the root level.
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder
:param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type:
string (or Expression with resultType string).
- :type collection: object
+ :type collection: any
"""

_validation = {
@@ -9014,7 +9249,7 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):

:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
:param type: Required. Type of linked service.Constant filled by server.
:type type: str
:param connect_via: The integration runtime reference.
@@ -9024,14 +9259,14 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):
:param parameters: Parameters for linked service.
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the linked service.
- :type annotations: list[object]
+ :type annotations: list[any]
:param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type:
string, SecureString or AzureKeyVaultSecretReference.
- :type connection_string: object
+ :type connection_string: any
:param database: Required. The name of the CosmosDB (MongoDB API) database that you want to
access. Type: string (or Expression with resultType string).
- :type database: object
+ :type database: any
"""

_validation = {
@@ -9068,28 +9303,31 @@ class CosmosDbMongoDbApiSink(CopySink):

:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
:param type: Required. Copy sink type.Constant filled by server.
:type type: str
:param write_batch_size: Write batch size. Type: integer (or Expression with resultType
integer), minimum: 0.
- :type write_batch_size: object
+ :type write_batch_size: any
:param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type write_batch_timeout: object
+ :type write_batch_timeout: any
:param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
integer).
- :type sink_retry_count: object
+ :type sink_retry_count: any
:param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type sink_retry_wait: object
+ :type sink_retry_wait: any
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
- :type max_concurrent_connections: object
+ :type max_concurrent_connections: any
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: any
:param write_behavior: Specifies whether a document with the same key is overwritten (upsert)
rather than raising an exception (insert). The default value is "insert". Type: string (or
Expression with resultType string).
- :type write_behavior: object
+ :type write_behavior: any
"""

_validation = {
@@ -9104,6 +9342,7 @@ class CosmosDbMongoDbApiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}

@@ -9123,32 +9362,35 @@ class CosmosDbMongoDbApiSource(CopySource):

:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
:param type: Required. Copy source type.Constant filled by server.
:type type: str
:param source_retry_count: Source retry count. Type: integer (or Expression with resultType
integer).
- :type source_retry_count: object
+ :type source_retry_count: any
:param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
+ :type source_retry_wait: any
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
- :type max_concurrent_connections: object
+ :type max_concurrent_connections: any
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: any
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
- :type filter: object
+ :type filter: any
:param cursor_methods: Cursor methods for MongoDB query.
:type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
:param batch_size: Specifies the number of documents to return in each batch of the response
from the MongoDB instance. In most cases, modifying the batch size will not affect the user or
the application. This property's main purpose is to avoid hitting the response size limit.
Type: integer (or Expression with resultType integer).
- :type batch_size: object
+ :type batch_size: any
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type query_timeout: object
+ :type query_timeout: any
:param additional_columns: Specifies the additional columns to be added to source data. Type:
array of objects (or Expression with resultType array of objects).
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -9164,6 +9406,7 @@ class CosmosDbMongoDbApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -9191,29 +9434,29 @@ class CosmosDbSqlApiCollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). - :type collection_name: object + :type collection_name: any """ _validation = { @@ -9251,27 +9494,30 @@ class CosmosDbSqlApiSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. - :type write_behavior: object + :type write_behavior: any """ _validation = { @@ -9286,6 +9532,7 @@ class CosmosDbSqlApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -9305,29 +9552,32 @@ class CosmosDbSqlApiSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: SQL API query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param page_size: Page size of the result. Type: integer (or Expression with resultType integer). - :type page_size: object + :type page_size: any :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). - :type preferred_regions: object + :type preferred_regions: any :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). - :type detect_datetime: object + :type detect_datetime: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -9343,6 +9593,7 @@ class CosmosDbSqlApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, @@ -9370,7 +9621,7 @@ class CouchbaseLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -9380,16 +9631,16 @@ class CouchbaseLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param cred_string: The Azure key vault secret reference of credString in connection string. :type cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -9426,27 +9677,30 @@ class CouchbaseSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -9459,6 +9713,7 @@ class CouchbaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -9480,28 +9735,28 @@ class CouchbaseTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -9653,7 +9908,7 @@ class CustomActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -9670,24 +9925,24 @@ class CustomActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param command: Required. Command for custom activity Type: string (or Expression with resultType string). - :type command: object + :type command: any :param resource_linked_service: Resource linked service reference. :type resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param folder_path: Folder path for resource files Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param reference_objects: Reference objects. 
:type reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject :param extended_properties: User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. - :type extended_properties: dict[str, object] + :type extended_properties: dict[str, any] :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). - :type retention_time_in_days: object + :type retention_time_in_days: any :param auto_user_specification: Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). - :type auto_user_specification: object + :type auto_user_specification: any """ _validation = { @@ -9759,28 +10014,28 @@ class CustomDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type_properties: Custom dataset properties. - :type type_properties: object + :type type_properties: any """ _validation = { @@ -9817,7 +10072,7 @@ class CustomDataSourceLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -9827,9 +10082,9 @@ class CustomDataSourceLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param type_properties: Required. Custom linked service properties. - :type type_properties: object + :type type_properties: any """ _validation = { @@ -9865,7 +10120,7 @@ class CustomEventsTrigger(MultiplePipelineTrigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -9874,7 +10129,7 @@ class CustomEventsTrigger(MultiplePipelineTrigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param subject_begins_with: The event subject must begin with the pattern provided for trigger @@ -9884,7 +10139,7 @@ class CustomEventsTrigger(MultiplePipelineTrigger): fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. :type subject_ends_with: str :param events: Required. The list of event types that cause this trigger to fire. - :type events: list[object] + :type events: list[any] :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. :type scope: str """ @@ -9928,7 +10183,7 @@ class DatabricksNotebookActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -9946,12 +10201,12 @@ class DatabricksNotebookActivity(ExecutionActivity): :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). - :type notebook_path: object + :type notebook_path: any :param base_parameters: Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. - :type base_parameters: dict[str, object] + :type base_parameters: dict[str, any] :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :type libraries: list[dict[str, any]] """ _validation = { @@ -9992,7 +10247,7 @@ class DatabricksSparkJarActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -10010,11 +10265,11 @@ class DatabricksSparkJarActivity(ExecutionActivity): :param main_class_name: Required. The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). - :type main_class_name: object + :type main_class_name: any :param parameters: Parameters that will be passed to the main method. - :type parameters: list[object] + :type parameters: list[any] :param libraries: A list of libraries to be installed on the cluster that will execute the job. 
- :type libraries: list[dict[str, object]] + :type libraries: list[dict[str, any]] """ _validation = { @@ -10055,7 +10310,7 @@ class DatabricksSparkPythonActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -10072,11 +10327,11 @@ class DatabricksSparkPythonActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param python_file: Required. The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). - :type python_file: object + :type python_file: any :param parameters: Command line parameters that will be passed to the Python file. - :type parameters: list[object] + :type parameters: list[any] :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :type libraries: list[dict[str, any]] """ _validation = { @@ -10116,17 +10371,23 @@ class DataFlow(msrest.serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: MappingDataFlow. - :param type: Type of data flow.Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. :type type: str :param description: The description of the data flow. :type description: str :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, @@ -10242,7 +10503,7 @@ class DataFlowDebugPackage(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param session_id: The ID of data flow debug session. :type session_id: str :param data_flow: Data flow instance. @@ -10287,9 +10548,9 @@ class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): :param source_settings: Source setting for data flow debug. :type source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] :param parameters: Data flow parameters. - :type parameters: dict[str, object] + :type parameters: dict[str, any] :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object + :type dataset_parameters: any """ _attribute_map = { @@ -10360,7 +10621,7 @@ class DataFlowDebugSessionInfo(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param data_flow_name: The name of the data flow. :type data_flow_name: str :param compute_type: Compute type of the cluster. 
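# --- Editor's note (illustration only, not part of the generated patch) ---
# A minimal sketch of what the model changes in these hunks mean for callers,
# assuming the post-patch azure-mgmt-datafactory models. Two things change:
# properties previously pinned to enum strings (such as the dataset compression
# ``level``) are now serialized as ``object``, so an ADF Expression payload is
# accepted alongside a plain string, and copy sources/sinks gain a
# ``disable_metrics_collection`` flag. The expression dict shape below follows
# the usual ADF ``{"type": "Expression", ...}`` convention and is shown as an
# assumption, not an API guarantee.
from azure.mgmt.datafactory.models import (
    CosmosDbSqlApiSource,
    DatasetGZipCompression,
)

# A plain enum-style string still round-trips exactly as before.
gzip_static = DatasetGZipCompression(level="Optimal")

# With ``level`` now typed ``object``, an Expression evaluated by the service
# at run time can be supplied instead of a literal (assumed payload shape).
gzip_dynamic = DatasetGZipCompression(
    level={"type": "Expression", "value": "@pipeline().parameters.gzipLevel"},
)

# The new opt-out flag added to copy sources/sinks throughout this patch.
source = CosmosDbSqlApiSource(
    query="select * from c",
    disable_metrics_collection=True,
)
# --- End editor's note ---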
@@ -10468,13 +10729,13 @@ class DataFlowReference(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] - :ivar type: Required. Data flow reference type. Default value: "DataFlowReference". + :type additional_properties: dict[str, any] + :ivar type: Data flow reference type. Has constant value: "DataFlowReference". :vartype type: str :param reference_name: Required. Reference data flow name. :type reference_name: str :param dataset_parameters: Reference data flow parameters from dataset. - :type dataset_parameters: object + :type dataset_parameters: any """ _validation = { @@ -10696,7 +10957,7 @@ class DataFlowSourceSetting(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param source_name: The data flow source name. :type source_name: str :param row_limit: Defines the row limit of data flow source in debug. @@ -10726,7 +10987,7 @@ class DataFlowStagingInfo(msrest.serialization.Model): :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any """ _attribute_map = { @@ -10750,7 +11011,7 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -10767,24 +11028,24 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). - :type script_path: object + :type script_path: any :param script_linked_service: Required. Script linked service reference. :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :type degree_of_parallelism: object + :type degree_of_parallelism: any :param priority: Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. - :type priority: object + :type priority: any :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] + :type parameters: dict[str, any] :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). - :type runtime_version: object + :type runtime_version: any :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or Expression with resultType string). 
- :type compilation_mode: object + :type compilation_mode: any """ _validation = { @@ -10837,7 +11098,7 @@ class DatasetCompression(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str """ @@ -10871,7 +11132,7 @@ class DatasetBZip2Compression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str """ @@ -10897,9 +11158,9 @@ class DatasetDataElement(msrest.serialization.Model): """Columns that define the structure of the dataset. :param name: Name of the column. Type: string (or Expression with resultType string). - :type name: object + :type name: any :param type: Type of the column. Type: string (or Expression with resultType string). - :type type: object + :type type: any """ _attribute_map = { @@ -10951,11 +11212,11 @@ class DatasetDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: any """ _validation = { @@ -10965,7 +11226,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11003,11 +11264,11 @@ class DatasetGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. + :type level: any """ _validation = { @@ -11017,7 +11278,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11065,12 +11326,12 @@ class DatasetReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Dataset reference type. Default value: "DatasetReference". + :ivar type: Dataset reference type. Has constant value: "DatasetReference". :vartype type: str :param reference_name: Required. Reference dataset name. :type reference_name: str :param parameters: Arguments for dataset. 
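For orientation, a hedged sketch of how the U-SQL activity parameters documented above compose; paths, names, and values are placeholders, not taken from this patch:

from azure.mgmt.datafactory.models import (
    DataLakeAnalyticsUSQLActivity,
    LinkedServiceReference,
)

activity = DataLakeAnalyticsUSQLActivity(
    name="RunUSqlScript",                  # required
    script_path="scripts/transform.usql",  # required, typed 'any'
    script_linked_service=LinkedServiceReference(reference_name="AdlsScriptStore"),  # required
    degree_of_parallelism=4,               # 'any': a literal here, an expression object also works
    priority=100,                          # lower number runs first
    parameters={"inputPath": "/data/in.tsv"},
    compilation_mode="Semantic",           # one of: Semantic, Full, SingleBox
)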
- :type parameters: dict[str, object] + :type parameters: dict[str, any] """ _validation = { @@ -11143,11 +11404,11 @@ class DatasetSchemaDataElement(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Name of the schema column. Type: string (or Expression with resultType string). - :type name: object + :type name: any :param type: Type of the schema column. Type: string (or Expression with resultType string). - :type type: object + :type type: any """ _attribute_map = { @@ -11173,7 +11434,7 @@ class DatasetTarCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str """ @@ -11202,11 +11463,11 @@ class DatasetTarGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The TarGZip compression level. + :type level: any """ _validation = { @@ -11216,7 +11477,7 @@ class DatasetTarGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11235,11 +11496,11 @@ class DatasetZipDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. + :type level: any """ _validation = { @@ -11249,7 +11510,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11268,7 +11529,7 @@ class Db2LinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -11278,36 +11539,36 @@ class Db2LinkedService(LinkedService): :param parameters: Parameters for linked service. 
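The same constant-type pattern applies to DatasetReference. A minimal sketch with invented names:

from azure.mgmt.datafactory.models import DatasetReference

ref = DatasetReference(
    reference_name="MyDataset",            # required
    parameters={"runDate": "2021-07-08"},  # dict[str, any]
)
assert ref.type == "DatasetReference"  # constant, not caller-supplied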
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: The connection string. It is mutually exclusive with server, database, authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param server: Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param database: Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param authentication_type: AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. Possible values include: "Basic". :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType :param username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param package_collection: Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type package_collection: object + :type package_collection: any :param certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type certificate_common_name: object + :type certificate_common_name: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -11356,26 +11617,29 @@ class Db2Source(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
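The Db2 docstrings above repeatedly flag connectionString as mutually exclusive with the discrete connection properties, so the two styles cannot be mixed. A hedged sketch; hosts and credentials are placeholders, and SecureString is the inline SecretBase variant from these models:

from azure.mgmt.datafactory.models import Db2LinkedService, SecureString

# Style 1: discrete properties, no connection_string.
by_parts = Db2LinkedService(
    server="db2.example.com",
    database="SAMPLE",
    authentication_type="Basic",
    username="dbuser",
    password=SecureString(value="<placeholder>"),
)

# Style 2: one connection string; do not also set the properties above.
by_string = Db2LinkedService(
    connection_string="Server=db2.example.com;Database=SAMPLE;",
)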
+ :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -11388,6 +11652,7 @@ class Db2Source(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -11409,34 +11674,34 @@ class Db2TableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The Db2 table name. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -11477,7 +11742,7 @@ class DeleteActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. 
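disableMetricsCollection, newly threaded through the sources and sinks in this patch, is typed 'any' like its siblings, so a boolean literal or an expression object both fit the same 'object' slot. A minimal sketch with a placeholder query:

from azure.mgmt.datafactory.models import Db2Source

source = Db2Source(
    query="SELECT 1 FROM SYSIBM.SYSDUMMY1",  # placeholder query
    disable_metrics_collection=True,
    # or: disable_metrics_collection={"value": "@variables('noMetrics')", "type": "Expression"}
)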
@@ -11494,13 +11759,13 @@ class DeleteActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param recursive: If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param max_concurrent_connections: The max concurrent connections to connect data source at the same time. :type max_concurrent_connections: int :param enable_logging: Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_logging: object + :type enable_logging: any :param log_storage_settings: Log storage settings customer need to provide when enableLogging is true. :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings @@ -11574,23 +11839,23 @@ class DelimitedTextDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -11598,31 +11863,30 @@ class DelimitedTextDataset(Dataset): :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :type column_delimiter: object + :type column_delimiter: any :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: object + :type row_delimiter: any :param encoding_name: The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: object - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~azure.mgmt.datafactory.models.CompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". 
- :type compression_level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :type encoding_name: any + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: any + :param compression_level: The data compression method used for DelimitedText. + :type compression_level: any :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: object + :type quote_char: any :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: object + :type escape_char: any :param first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object + :type first_row_as_header: any :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :type null_value: any """ _validation = { @@ -11644,8 +11908,8 @@ class DelimitedTextDataset(Dataset): 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -11677,12 +11941,12 @@ class DelimitedTextReadSettings(FormatReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). - :type skip_line_count: object + :type skip_line_count: any :param compression_properties: Compression settings. :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ @@ -11715,24 +11979,27 @@ class DelimitedTextSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. 
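With compressionCodec and compressionLevel both dropping to 'any' above, either property can now carry an expression as well as a literal. A sketch under that assumption; the storage names are invented:

from azure.mgmt.datafactory.models import (
    DelimitedTextDataset,
    LinkedServiceReference,
)

dataset = DelimitedTextDataset(
    linked_service_name=LinkedServiceReference(reference_name="MyBlobStore"),  # required
    column_delimiter=",",
    first_row_as_header=True,
    compression_codec="gzip",  # a plain string still works
    compression_level={"value": "@pipeline().parameters.level", "type": "Expression"},
)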
Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: DelimitedText store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: DelimitedText format settings. @@ -11751,6 +12018,7 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } @@ -11772,18 +12040,21 @@ class DelimitedTextSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: DelimitedText store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: DelimitedText format settings. 
@@ -11803,6 +12074,7 @@ class DelimitedTextSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -11826,22 +12098,22 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param quote_all_text: Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). - :type quote_all_text: object + :type quote_all_text: any :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). - :type file_extension: object + :type file_extension: any :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object + :type max_rows_per_file: any :param file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: object + :type file_name_prefix: any """ _validation = { @@ -11909,14 +12181,14 @@ class DistcpSettings(msrest.serialization.Model): :param resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). - :type resource_manager_endpoint: object + :type resource_manager_endpoint: any :param temp_script_path: Required. Specifies an existing folder path which will be used to store temp Distcp command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType string). - :type temp_script_path: object + :type temp_script_path: any :param distcp_options: Specifies the Distcp options. Type: string (or Expression with resultType string). - :type distcp_options: object + :type distcp_options: any """ _validation = { @@ -11947,29 +12219,29 @@ class DocumentDbCollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
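Combining the read settings with the new metrics flag on the source side; a minimal sketch, not taken from the patch:

from azure.mgmt.datafactory.models import (
    DelimitedTextSource,
    DelimitedTextReadSettings,
)

source = DelimitedTextSource(
    format_settings=DelimitedTextReadSettings(skip_line_count=1),  # skip one header row
    disable_metrics_collection=False,
)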
- :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. Document Database collection name. Type: string (or Expression with resultType string). - :type collection_name: object + :type collection_name: any """ _validation = { @@ -12007,30 +12279,33 @@ class DocumentDbCollectionSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). - :type nesting_separator: object + :type nesting_separator: any :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. - :type write_behavior: object + :type write_behavior: any """ _validation = { @@ -12045,6 +12320,7 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -12066,26 +12342,29 @@ class DocumentDbCollectionSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
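Although writeBehavior above is typed 'any', the docstring constrains it to insert or upsert. A sketch with placeholder values:

from azure.mgmt.datafactory.models import DocumentDbCollectionSink

sink = DocumentDbCollectionSink(
    write_behavior="upsert",  # allowed values per the docstring: insert, upsert
    nesting_separator=".",    # '.' (dot) is also the default
    disable_metrics_collection=True,
)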
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Documents query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param nesting_separator: Nested properties separator. Type: string (or Expression with resultType string). - :type nesting_separator: object + :type nesting_separator: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -12101,6 +12380,7 @@ class DocumentDbCollectionSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, @@ -12126,7 +12406,7 @@ class DrillLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -12136,16 +12416,16 @@ class DrillLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -12182,27 +12462,30 @@ class DrillSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -12215,6 +12498,7 @@ class DrillSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -12236,34 +12520,34 @@ class DrillTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. 
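For the Drill service above, pwd is a key-vault reference rather than an inline secret. A hedged sketch; the store/secret_name constructor shape of AzureKeyVaultSecretReference is assumed from how it is referenced in these models, and all names are invented:

from azure.mgmt.datafactory.models import (
    DrillLinkedService,
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
)

linked_service = DrillLinkedService(
    connection_string="DRIVER={Drill ODBC Driver};Host=drill.example.com;",  # placeholder
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="MyKeyVault"),
        secret_name="drillPassword",
    ),
)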
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Drill. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -12301,10 +12585,10 @@ class DWCopyCommandDefaultValue(msrest.serialization.Model): """Default value. :param column_name: Column name. Type: object (or Expression with resultType string). - :type column_name: object + :type column_name: any :param default_value: The default value of the column. Type: object (or Expression with resultType string). - :type default_value: object + :type default_value: any """ _attribute_map = { @@ -12356,7 +12640,7 @@ class DynamicsAXLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -12366,13 +12650,13 @@ class DynamicsAXLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. - :type url: object + :type url: any :param service_principal_id: Required. Specify the application's client ID. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: Required. Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). @@ -12380,14 +12664,14 @@ class DynamicsAXLinkedService(LinkedService): :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). - :type aad_resource_id: object + :type aad_resource_id: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -12435,29 +12719,29 @@ class DynamicsAXResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). - :type path: object + :type path: any """ _validation = { @@ -12495,32 +12779,35 @@ class DynamicsAXSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -12533,6 +12820,7 @@ class DynamicsAXSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -12556,29 +12844,29 @@ class DynamicsCrmEntityDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: object + :type entity_name: any """ _validation = { @@ -12615,7 +12903,7 @@ class DynamicsCrmLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -12625,47 +12913,43 @@ class DynamicsCrmLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. 
Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType + string (or Expression with resultType string). + :type deployment_type: any :param host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: object + :type host_name: any :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object + :type service_uri: any :param organization_name: The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object + :type organization_name: any :param authentication_type: Required. The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: any :param username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password to access the Dynamics CRM instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: any :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -12675,7 +12959,7 @@ class DynamicsCrmLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -12691,16 +12975,16 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -12732,34 +13016,37 @@ class DynamicsCrmSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. 
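deploymentType and authenticationType above traded their enums for 'any', so the previous string literals keep working while expression objects become legal. An illustrative Office365 sketch; URLs and credentials are placeholders:

from azure.mgmt.datafactory.models import DynamicsCrmLinkedService, SecureString

linked_service = DynamicsCrmLinkedService(
    deployment_type="Online",                        # required; was the DynamicsDeploymentType enum
    service_uri="https://contoso.crm.dynamics.com",  # required for the online case
    authentication_type="Office365",                 # required; was the DynamicsAuthenticationType enum
    username="user@contoso.com",
    password=SecureString(value="<placeholder>"),
)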
Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object + :type ignore_null_values: any :param alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: object + :type alternate_key_name: any """ _validation = { @@ -12775,6 +13062,7 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -12798,21 +13086,24 @@ class DynamicsCrmSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -12828,6 +13119,7 @@ class DynamicsCrmSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -12849,29 +13141,29 @@ class DynamicsEntityDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. 
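Unlike the linked service, the sink keeps its enum: writeBehavior is required and remains a DynamicsSinkWriteBehavior. A short sketch; the alternate key is invented:

from azure.mgmt.datafactory.models import DynamicsCrmSink

sink = DynamicsCrmSink(
    write_behavior="Upsert",             # required; the only documented value
    ignore_null_values=True,             # 'any': skip nulls (except key fields) on write
    alternate_key_name="accountnumber",  # placeholder alternate key
)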
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: object + :type entity_name: any """ _validation = { @@ -12908,7 +13200,7 @@ class DynamicsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -12918,44 +13210,42 @@ class DynamicsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType + Expression with resultType string). + :type deployment_type: any :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: object + :type host_name: any :param port: The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- - line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object + :type port: any + :param service_uri: The URL to the Microsoft Dynamics server. The property is required for + on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + :type service_uri: any :param organization_name: The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). 
- :type organization_name: object + :type organization_name: any :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + resultType string). + :type authentication_type: any :param username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password to access the Dynamics instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: str :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -12965,7 +13255,7 @@ class DynamicsLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -12981,12 +13271,12 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, @@ -13022,34 +13312,37 @@ class DynamicsSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object + :type ignore_null_values: any :param alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). 
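[Editor's note] deploymentType and authenticationType on the Dynamics and Dynamics CRM linked services regenerated above now serialize as 'object' rather than the former enum strings, so an ADF expression payload is accepted where only an enum value used to be. A sketch under that assumption; the parameter name inside the expression is hypothetical:

```python
from azure.mgmt.datafactory.models import DynamicsLinkedService, SecureString

linked_service = DynamicsLinkedService(
    # An Expression payload is now valid here; a plain "Online" string still works.
    deployment_type={"type": "Expression", "value": "@linkedService().depType"},
    authentication_type="Office365",
    username="user@contoso.example",
    password=SecureString(value="<placeholder>"),
)
```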
- :type alternate_key_name: object + :type alternate_key_name: any """ _validation = { @@ -13065,6 +13358,7 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -13088,21 +13382,24 @@ class DynamicsSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -13118,6 +13415,7 @@ class DynamicsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -13139,7 +13437,7 @@ class EloquaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -13149,28 +13447,28 @@ class EloquaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. 
The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :type endpoint: object + :type endpoint: any :param username: Required. The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice). - :type username: object + :type username: any :param password: The password corresponding to the user name. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -13217,28 +13515,28 @@ class EloquaObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -13275,27 +13573,30 @@ class EloquaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. 
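[Editor's note] A minimal sketch of the Eloqua linked service described above, not taken from this patch; endpoint and username are the required type properties, and the three SSL-related toggles default to true when omitted:

```python
from azure.mgmt.datafactory.models import EloquaLinkedService, SecureString

eloqua = EloquaLinkedService(
    endpoint="eloqua.example.com",   # hypothetical server
    username="Eloqua/Alice",         # sitename/username form
    password=SecureString(value="<placeholder>"),
    use_encrypted_endpoints=True,
)
```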
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -13308,6 +13609,7 @@ class EloquaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -13367,7 +13669,7 @@ class EntityReference(msrest.serialization.Model): """The entity reference. :param type: The type of this referenced entity. Possible values include: - "IntegrationRuntimeReference", "LinkedServiceReference". + "IntegrationRuntimeReference", "LinkedServiceReference", "CredentialReference". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType :param reference_name: The name of this referenced entity. :type reference_name: str @@ -13429,42 +13731,45 @@ class ExcelDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the excel storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + :param sheet_name: The sheet name of excel file. Type: string (or Expression with resultType string). - :type sheet_name: object + :type sheet_name: any + :param sheet_index: The sheet index of excel file and default value is 0. Type: integer (or + Expression with resultType integer). + :type sheet_index: any :param range: The partial data of one sheet. Type: string (or Expression with resultType string). - :type range: object + :type range: any :param first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object + :type first_row_as_header: any :param compression: The data compression method used for the json dataset. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :type null_value: any """ _validation = { @@ -13484,6 +13789,7 @@ class ExcelDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, 'range': {'key': 'typeProperties.range', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -13498,6 +13804,7 @@ def __init__( self.type = 'Excel' # type: str self.location = kwargs.get('location', None) self.sheet_name = kwargs.get('sheet_name', None) + self.sheet_index = kwargs.get('sheet_index', None) self.range = kwargs.get('range', None) self.first_row_as_header = kwargs.get('first_row_as_header', None) self.compression = kwargs.get('compression', None) @@ -13511,18 +13818,21 @@ class ExcelSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Excel store settings. 
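[Editor's note] The new sheetIndex property lets an Excel dataset address a sheet by position instead of (or alongside) sheetName. A sketch assuming a blob-backed workbook; the linked service name, container, and file are hypothetical:

```python
from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation,
    ExcelDataset,
    LinkedServiceReference,
)

dataset = ExcelDataset(
    linked_service_name=LinkedServiceReference(reference_name="BlobStore"),
    location=AzureBlobStorageLocation(container="data", file_name="book.xlsx"),
    sheet_index=0,               # new in this regeneration; defaults to 0
    first_row_as_header=True,
)
```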
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -13540,6 +13850,7 @@ class ExcelSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -13561,7 +13872,7 @@ class ExecuteDataFlowActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -13586,14 +13897,14 @@ class ExecuteDataFlowActivity(ExecutionActivity): :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :param trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: object + :type trace_level: any :param continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: object + :type continue_on_error: any :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :type run_concurrently: object + :type run_concurrently: any """ _validation = { @@ -13641,10 +13952,10 @@ class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): :param compute_type: Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string). - :type compute_type: object + :type compute_type: any :param core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). - :type core_count: object + :type core_count: any """ _attribute_map = { @@ -13661,14 +13972,14 @@ def __init__( self.core_count = kwargs.get('core_count', None) -class ExecutePipelineActivity(Activity): +class ExecutePipelineActivity(ControlActivity): """Execute pipeline activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -13682,7 +13993,7 @@ class ExecutePipelineActivity(Activity): :param pipeline: Required. Pipeline reference. :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference :param parameters: Pipeline parameters. 
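[Editor's note] ExecutePipelineActivity (and, further down, FilterActivity and ForEachActivity) are rebased from Activity onto ControlActivity, so generic control-flow handling can now dispatch on the shared base class. A sketch with hypothetical names:

```python
from azure.mgmt.datafactory.models import (
    ControlActivity,
    ExecutePipelineActivity,
    PipelineReference,
)

act = ExecutePipelineActivity(
    name="RunChild",
    pipeline=PipelineReference(reference_name="child-pipeline"),
    wait_on_completion=True,
)
assert isinstance(act, ControlActivity)  # holds after this regeneration
```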
- :type parameters: dict[str, object] + :type parameters: dict[str, any] :param wait_on_completion: Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. :type wait_on_completion: bool @@ -13724,7 +14035,7 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -13743,13 +14054,13 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). - :type runtime: object + :type runtime: any :param logging_level: The logging level of SSIS package execution. Type: string (or Expression with resultType string). - :type logging_level: object + :type logging_level: any :param environment_path: The environment path to execute the SSIS package. Type: string (or Expression with resultType string). - :type environment_path: object + :type environment_path: any :param execution_credential: The package execution credential. :type execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential :param connect_via: Required. The integration runtime reference. @@ -13760,10 +14071,12 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type package_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] :param project_connection_managers: The project level connection managers to execute the SSIS package. - :type project_connection_managers: dict[str, object] + :type project_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] :param package_connection_managers: The package level connection managers to execute the SSIS package. - :type package_connection_managers: dict[str, object] + :type package_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] :param property_overrides: The property overrides to execute the SSIS package. :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] :param log_location: SSIS package execution log location. 
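[Editor's note] The project- and package-level connection managers tighten from dict[str, object] to dict[str, dict[str, SSISExecutionParameter]], serialized as '{{SSISExecutionParameter}}' in the hunk below. A sketch; the connection manager and property names are hypothetical:

```python
from azure.mgmt.datafactory.models import (
    ExecuteSSISPackageActivity,
    IntegrationRuntimeReference,
    SSISExecutionParameter,
    SSISPackageLocation,
)

activity = ExecuteSSISPackageActivity(
    name="RunPackage",
    package_location=SSISPackageLocation(package_path="Folder/Project/Package.dtsx"),
    connect_via=IntegrationRuntimeReference(reference_name="my-ssis-ir"),
    project_connection_managers={
        # outer key: connection manager name; inner key: property to override
        "OLEDB": {"ConnectionString": SSISExecutionParameter(value="<conn-string>")},
    },
)
```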
@@ -13794,8 +14107,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } @@ -13930,7 +14243,7 @@ class Expression(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Expression type. Default value: "Expression". + :ivar type: Expression type. Has constant value: "Expression". :vartype type: str :param value: Required. Expression value. :type value: str @@ -14023,7 +14336,7 @@ class Factory(Resource): :vartype e_tag: str :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param identity: Managed service identity of the factory. :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity :ivar provisioning_state: Factory provisioning state, example Succeeded. @@ -14206,7 +14519,7 @@ class FactoryIdentity(msrest.serialization.Model): :ivar tenant_id: The client tenant id of the identity. :vartype tenant_id: str :param user_assigned_identities: List of user assigned identities for the factory. - :type user_assigned_identities: dict[str, object] + :type user_assigned_identities: dict[str, any] """ _validation = { @@ -14368,7 +14681,7 @@ class FileServerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -14378,19 +14691,19 @@ class FileServerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. Host name of the server. Type: string (or Expression with resultType string). - :type host: object + :type host: any :param user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :type user_id: object + :type user_id: any :param password: Password to logon the server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -14430,15 +14743,15 @@ class FileServerLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -14467,42 +14780,45 @@ class FileServerReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
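[Editor's note] disableMetricsCollection joins the existing expression-typed knobs on every read/write settings model. A sketch of the file-server read settings described above; values are hypothetical:

```python
from azure.mgmt.datafactory.models import FileServerReadSettings

read_settings = FileServerReadSettings(
    recursive=True,
    wildcard_file_name="*.csv",
    delete_files_after_completion=False,
    disable_metrics_collection=True,   # new in this regeneration
)
```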
- :type modified_datetime_end: object + :type modified_datetime_end: any :param file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). - :type file_filter: object + :type file_filter: any """ _validation = { @@ -14513,6 +14829,7 @@ class FileServerReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -14550,14 +14867,17 @@ class FileServerWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any """ _validation = { @@ -14568,6 +14888,7 @@ class FileServerWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -14586,43 +14907,43 @@ class FileShareDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any :param format: The format of the files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). - :type file_filter: object + :type file_filter: any :param compression: The data compression method used for the file system. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ @@ -14673,26 +14994,29 @@ class FileSystemSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any """ _validation = { @@ -14707,6 +15031,7 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -14726,21 +15051,24 @@ class FileSystemSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -14756,6 +15084,7 @@ class FileSystemSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -14770,14 +15099,14 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) -class FilterActivity(Activity): +class FilterActivity(ControlActivity): """Filter and return results from input array based on the conditions. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -14822,14 +15151,14 @@ def __init__( self.condition = kwargs['condition'] -class ForEachActivity(Activity): +class ForEachActivity(ControlActivity): """This activity is used for iterating over a collection and execute given activities. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -14891,33 +15220,36 @@ class FtpReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. 
The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. :type use_binary_transfer: bool """ @@ -14930,6 +15262,7 @@ class FtpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -14963,7 +15296,7 @@ class FtpServerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -14973,32 +15306,32 @@ class FtpServerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType string). 
- :type host: object + :type host: any :param port: The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "Anonymous". :type authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to logon the FTP server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object + :type enable_ssl: any :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object + :type enable_server_certificate_validation: any """ _validation = { @@ -15046,15 +15379,15 @@ class FtpServerLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -15081,7 +15414,7 @@ class GetDataFactoryOperationStatusResponse(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param status: Status of the operation. :type status: str """ @@ -15107,7 +15440,7 @@ class GetMetadataActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -15125,7 +15458,7 @@ class GetMetadataActivity(ExecutionActivity): :param dataset: Required. GetMetadata activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param field_list: Fields of metadata to get from dataset. - :type field_list: list[object] + :type field_list: list[any] :param store_settings: GetMetadata activity store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: GetMetadata activity format settings. 
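[Editor's note] A sketch of the FTP linked service described above, not part of the patch; host is the only required type property, and both SSL toggles default to true when omitted:

```python
from azure.mgmt.datafactory.models import FtpServerLinkedService, SecureString

ftp = FtpServerLinkedService(
    host="ftp.example.com",   # hypothetical host
    port=21,
    authentication_type="Basic",
    user_name="alice",
    password=SecureString(value="<placeholder>"),
    enable_ssl=True,
)
```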
@@ -15246,7 +15579,7 @@ class GlobalParameterSpecification(msrest.serialization.Model): "Int", "Float", "Bool", "Array". :type type: str or ~azure.mgmt.datafactory.models.GlobalParameterType :param value: Required. Value of parameter. - :type value: object + :type value: any """ _validation = { @@ -15275,7 +15608,7 @@ class GoogleAdWordsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -15285,10 +15618,10 @@ class GoogleAdWordsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param client_customer_id: Required. The Client customer ID of the AdWords account that you want to fetch report data for. - :type client_customer_id: object + :type client_customer_id: any :param developer_token: Required. The developer token associated with the manager account that you use to grant access to the AdWords API. :type developer_token: ~azure.mgmt.datafactory.models.SecretBase @@ -15302,27 +15635,27 @@ class GoogleAdWordsLinkedService(LinkedService): :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param client_secret: The client secret of the google application used to acquire the refresh token. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :type email: object + :type email: any :param key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :type key_file_path: object + :type key_file_path: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -15378,28 +15711,28 @@ class GoogleAdWordsObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. 
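[Editor's note] A sketch of the Google AdWords linked service above; clientCustomerID, developerToken, and authenticationType are required, and ServiceAuthentication (email plus key_file_path) is only usable on a self-hosted IR. All credential values are placeholders:

```python
from azure.mgmt.datafactory.models import GoogleAdWordsLinkedService, SecureString

adwords = GoogleAdWordsLinkedService(
    client_customer_id="123-456-7890",
    developer_token=SecureString(value="<token>"),
    authentication_type="UserAuthentication",
    refresh_token=SecureString(value="<refresh-token>"),
    client_id="<oauth-client-id>",
    client_secret=SecureString(value="<oauth-client-secret>"),
)
```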
:type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -15436,27 +15769,30 @@ class GoogleAdWordsSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -15469,6 +15805,7 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -15490,7 +15827,7 @@ class GoogleBigQueryLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -15500,15 +15837,15 @@ class GoogleBigQueryLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param project: Required. The default BigQuery project to query against. - :type project: object + :type project: any :param additional_projects: A comma-separated list of public BigQuery projects to access. - :type additional_projects: object + :type additional_projects: any :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. - :type request_google_drive_scope: object + :type request_google_drive_scope: any :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". @@ -15519,27 +15856,27 @@ class GoogleBigQueryLinkedService(LinkedService): :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param client_secret: The client secret of the google application used to acquire the refresh token. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :type email: object + :type email: any :param key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :type key_file_path: object + :type key_file_path: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. 
- :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -15596,35 +15933,35 @@ class GoogleBigQueryObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using database + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Google BigQuery. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param dataset: The database name of the Google BigQuery. Type: string (or Expression with resultType string). - :type dataset: object + :type dataset: any """ _validation = { @@ -15665,27 +16002,30 @@ class GoogleBigQuerySource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. 
Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -15698,6 +16038,7 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -15719,7 +16060,7 @@ class GoogleCloudStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -15729,10 +16070,10 @@ class GoogleCloudStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object + :type access_key_id: any :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -15740,11 +16081,11 @@ class GoogleCloudStorageLinkedService(LinkedService): Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: object + :type service_url: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -15783,21 +16124,21 @@ class GoogleCloudStorageLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. 
:type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any :param version: Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). - :type version: object + :type version: any """ _validation = { @@ -15830,42 +16171,45 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -15876,6 +16220,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -15913,7 +16258,7 @@ class GreenplumLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -15923,16 +16268,16 @@ class GreenplumLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -15969,27 +16314,30 @@ class GreenplumSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -16002,6 +16350,7 @@ class GreenplumSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -16023,34 +16372,34 @@ class GreenplumTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of Greenplum. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -16091,7 +16440,7 @@ class HBaseLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -16101,39 +16450,39 @@ class HBaseLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[object] + :type annotations: list[any] :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :type host: object + :type host: any :param port: The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. - :type port: object + :type port: any :param http_path: The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version). - :type http_path: object + :type http_path: any :param authentication_type: Required. The authentication mechanism to use to connect to the HBase server. Possible values include: "Anonymous", "Basic". :type authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType :param username: The user name used to connect to the HBase instance. - :type username: object + :type username: any :param password: The password corresponding to the user name. :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -16188,28 +16537,28 @@ class HBaseObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -16246,27 +16595,30 @@ class HBaseSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -16279,6 +16631,7 @@ class HBaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -16300,7 +16653,7 @@ class HdfsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -16310,20 +16663,20 @@ class HdfsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. 
The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). - :type authentication_type: object + :type authentication_type: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param user_name: User name for Windows authentication. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password for Windows authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -16367,15 +16720,15 @@ class HdfsLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -16404,41 +16757,44 @@ class HdfsReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
- :type partition_root_path: object + :type partition_root_path: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any :param distcp_settings: Specifies Distcp-related settings. :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any """ _validation = { @@ -16449,6 +16805,7 @@ class HdfsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16486,21 +16843,24 @@ class HdfsSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param distcp_settings: Specifies Distcp-related settings. 
:type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings """ @@ -16515,6 +16875,7 @@ class HdfsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } @@ -16536,7 +16897,7 @@ class HDInsightHiveActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -16554,17 +16915,17 @@ class HDInsightHiveActivity(ExecutionActivity): :param storage_linked_services: Storage linked service references. :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] + :type arguments: list[any] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object + :type script_path: any :param script_linked_service: Script linked service reference. :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] + :type defines: dict[str, any] :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] + :type variables: list[any] :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package). :type query_timeout: int @@ -16617,7 +16978,7 @@ class HDInsightLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -16627,13 +16988,13 @@ class HDInsightLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with resultType string). - :type cluster_uri: object + :type cluster_uri: any :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: HDInsight cluster password. :type password: ~azure.mgmt.datafactory.models.SecretBase :param linked_service_name: The Azure Storage linked service reference. 
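
The same literal-or-Expression convention holds for the activity models in these hunks. A minimal sketch of the HDInsightHiveActivity documented just above, with hypothetical script and linked-service names; the LinkedServiceReference constructor shown assumes the generated models of this vintage, where type is a class constant, and should be checked against the installed build:

from azure.mgmt.datafactory.models import (
    HDInsightHiveActivity,
    LinkedServiceReference,
)

hive = HDInsightHiveActivity(
    name="RunDailyHiveScript",
    linked_service_name=LinkedServiceReference(reference_name="MyHDInsightLS"),
    script_path="scripts/daily_agg.hql",  # "any": a plain string here, an Expression elsewhere
    script_linked_service=LinkedServiceReference(reference_name="MyStorageLS"),
    defines={"inputPath": "/data/raw"},   # dict[str, any]: values may also be Expressions
    get_debug_info="Failure",
)
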
@@ -16644,13 +17005,13 @@ class HDInsightLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. - :type is_esp_enabled: object + :type is_esp_enabled: any :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). - :type file_system: object + :type file_system: any """ _validation = { @@ -16698,7 +17059,7 @@ class HDInsightMapReduceActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -16716,19 +17077,19 @@ class HDInsightMapReduceActivity(ExecutionActivity): :param storage_linked_services: Storage linked service references. :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] + :type arguments: list[any] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param class_name: Required. Class name. Type: string (or Expression with resultType string). - :type class_name: object + :type class_name: any :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). - :type jar_file_path: object + :type jar_file_path: any :param jar_linked_service: Jar linked service reference. :type jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param jar_libs: Jar libs. - :type jar_libs: list[object] + :type jar_libs: list[any] :param defines: Allows user to specify defines for the MapReduce job request. - :type defines: dict[str, object] + :type defines: dict[str, any] """ _validation = { @@ -16780,7 +17141,7 @@ class HDInsightOnDemandLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -16790,46 +17151,46 @@ class HDInsightOnDemandLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). - :type cluster_size: object + :type cluster_size: any :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. 
Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). - :type time_to_live: object + :type time_to_live: any :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with resultType string). - :type version: object + :type version: any :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand cluster for storing and processing data. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). - :type host_subscription_id: object + :type host_subscription_id: any :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key for the service principal id. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: string (or Expression with resultType string). - :type cluster_resource_group: object + :type cluster_resource_group: any :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType string). - :type cluster_name_prefix: object + :type cluster_name_prefix: any :param cluster_user_name: The username to access the cluster. Type: string (or Expression with resultType string). - :type cluster_user_name: object + :type cluster_user_name: any :param cluster_password: The password to access the cluster. :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). - :type cluster_ssh_user_name: object + :type cluster_ssh_user_name: any :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight @@ -16841,56 +17202,55 @@ class HDInsightOnDemandLinkedService(LinkedService): as the metastore. :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param cluster_type: The cluster type. Type: string (or Expression with resultType string). - :type cluster_type: object + :type cluster_type: any :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). - :type spark_version: object + :type spark_version: any :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. - :type core_configuration: object + :type core_configuration: any :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. 
- :type h_base_configuration: object + :type h_base_configuration: any :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. - :type hdfs_configuration: object + :type hdfs_configuration: any :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. - :type hive_configuration: object - :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred- - site.xml) for the HDInsight cluster. - :type map_reduce_configuration: object + :type hive_configuration: any + :param map_reduce_configuration: Specifies the MapReduce configuration parameters + (mapred-site.xml) for the HDInsight cluster. + :type map_reduce_configuration: any :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. - :type oozie_configuration: object + :type oozie_configuration: any :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. - :type storm_configuration: object + :type storm_configuration: any :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. - :type yarn_configuration: object + :type yarn_configuration: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param head_node_size: Specifies the size of the head node for the HDInsight cluster. - :type head_node_size: object + :type head_node_size: any :param data_node_size: Specifies the size of the data node for the HDInsight cluster. - :type data_node_size: object + :type data_node_size: any :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight cluster. - :type zookeeper_node_size: object + :type zookeeper_node_size: any :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. - Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- - cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- - us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + Please refer to + https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). - :type virtual_network_id: object + :type virtual_network_id: any :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). - :type subnet_name: object + :type subnet_name: any """ _validation = { @@ -16994,7 +17354,7 @@ class HDInsightPigActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. 
Type of activity.Constant filled by server. @@ -17013,15 +17373,15 @@ class HDInsightPigActivity(ExecutionActivity): :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). - :type arguments: object + :type arguments: any :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object + :type script_path: any :param script_linked_service: Script linked service reference. :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] + :type defines: dict[str, any] """ _validation = { @@ -17067,7 +17427,7 @@ class HDInsightSparkActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -17084,12 +17444,12 @@ class HDInsightSparkActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). - :type root_path: object + :type root_path: any :param entry_file_path: Required. The relative path to the root folder of the code/package to be executed. Type: string (or Expression with resultType string). - :type entry_file_path: object + :type entry_file_path: any :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] + :type arguments: list[any] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param spark_job_linked_service: The storage linked service for uploading the entry file and @@ -17099,9 +17459,9 @@ class HDInsightSparkActivity(ExecutionActivity): :type class_name: str :param proxy_user: The user to impersonate that will execute the job. Type: string (or Expression with resultType string). - :type proxy_user: object + :type proxy_user: any :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] + :type spark_config: dict[str, any] """ _validation = { @@ -17153,7 +17513,7 @@ class HDInsightStreamingActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -17171,29 +17531,29 @@ class HDInsightStreamingActivity(ExecutionActivity): :param storage_linked_services: Storage linked service references. :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. 
- :type arguments: list[object] + :type arguments: list[any] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType string). - :type mapper: object + :type mapper: any :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType string). - :type reducer: object + :type reducer: any :param input: Required. Input blob path. Type: string (or Expression with resultType string). - :type input: object + :type input: any :param output: Required. Output blob path. Type: string (or Expression with resultType string). - :type output: object + :type output: any :param file_paths: Required. Paths to streaming job files. Can be directories. - :type file_paths: list[object] + :type file_paths: list[any] :param file_linked_service: Linked service reference where the files are located. :type file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param combiner: Combiner executable name. Type: string (or Expression with resultType string). - :type combiner: object + :type combiner: any :param command_environment: Command line environment values. - :type command_environment: list[object] + :type command_environment: list[any] :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, object] + :type defines: dict[str, any] """ _validation = { @@ -17256,7 +17616,7 @@ class HiveLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -17266,12 +17626,12 @@ class HiveLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enable). - :type host: object + :type host: any :param port: The TCP port that the Hive server uses to listen for client connections. - :type port: object + :type port: any :param server_type: The type of Hive server. Possible values include: "HiveServer1", "HiveServer2", "HiveThriftServer". :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType @@ -17284,40 +17644,40 @@ class HiveLinkedService(LinkedService): "WindowsAzureHDInsightService". :type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. - :type service_discovery_mode: object + :type service_discovery_mode: any :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are added. - :type zoo_keeper_name_space: object + :type zoo_keeper_name_space: any :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts them into an equivalent form in HiveQL. 
- :type use_native_query: object + :type use_native_query: any :param username: The user name that you use to access Hive Server. - :type username: object + :type username: any :param password: The password corresponding to the user name that you provided in the Username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object + :type http_path: any :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -17384,34 +17744,34 @@ class HiveObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. 
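A hedged usage sketch for the HiveLinkedService hunk above: host and authentication_type are the required properties, and the "any"-typed fields take plain Python values. The host name and credentials below are placeholders.

    from azure.mgmt.datafactory.models import HiveLinkedService, SecureString

    hive_ls = HiveLinkedService(
        host="hive.contoso.com",                 # required; "any" accepts a plain str
        port=10000,
        authentication_type="Username",
        username="hive_user",
        password=SecureString(value="<placeholder>"),
        enable_ssl=True,
    )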
- :type table_name: object + :type table_name: any :param table: The table name of the Hive. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -17452,27 +17812,30 @@ class HiveSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -17485,6 +17848,7 @@ class HiveSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -17506,41 +17870,41 @@ class HttpDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
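The HiveSource hunk above adds disableMetricsCollection alongside the existing retry and timeout knobs. A minimal sketch of a source payload using the new flag; the query is illustrative.

    from azure.mgmt.datafactory.models import HiveSource

    hive_source = HiveSource(
        query="SELECT * FROM web_logs",   # "any": plain str or an Expression payload
        query_timeout="00:10:00",         # matches the documented timespan pattern
        disable_metrics_collection=True,  # new in this regeneration; default is false
    )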
- :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). - :type relative_url: object + :type relative_url: any :param request_method: The HTTP method for the HTTP request. Type: string (or Expression with resultType string). - :type request_method: object + :type request_method: any :param request_body: The body for the HTTP request. Type: string (or Expression with resultType string). - :type request_body: object - :param additional_headers: The headers for the HTTP Request. e.g. request-header- - name-1:request-header-value-1 + :type request_body: any + :param additional_headers: The headers for the HTTP Request. e.g. + request-header-name-1:request-header-value-1 ... request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). - :type additional_headers: object + :type additional_headers: any :param format: The format of files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used on files. @@ -17591,7 +17955,7 @@ class HttpLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -17601,39 +17965,39 @@ class HttpLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: The authentication type to be used to connect to the HTTP server. Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". :type authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). 
- :type auth_headers: object + :type auth_headers: any :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). - :type embedded_cert_data: object + :type embedded_cert_data: any :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). - :type cert_thumbprint: object + :type cert_thumbprint: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param enable_server_certificate_validation: If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object + :type enable_server_certificate_validation: any """ _validation = { @@ -17683,29 +18047,32 @@ class HttpReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: object + :type request_method: any :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: object + :type request_body: any :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object + :type additional_headers: any :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. - :type request_timeout: object + :type request_timeout: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
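For the HttpLinkedService hunk above, a minimal sketch with Basic authentication; url is the only required property. The endpoint and credentials are placeholders.

    from azure.mgmt.datafactory.models import HttpLinkedService, SecureString

    http_ls = HttpLinkedService(
        url="https://example.com/data",   # required; "any" accepts a plain str
        authentication_type="Basic",
        user_name="svc_user",
        password=SecureString(value="<placeholder>"),
        enable_server_certificate_validation=True,
    )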
- :type partition_root_path: object + :type partition_root_path: any """ _validation = { @@ -17716,6 +18083,7 @@ class HttpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -17745,18 +18113,18 @@ class HttpServerLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param relative_url: Specify the relativeUrl of http server. Type: string (or Expression with resultType string). - :type relative_url: object + :type relative_url: any """ _validation = { @@ -17787,23 +18155,26 @@ class HttpSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
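HttpReadSettings also gains disableMetricsCollection in this hunk, and HttpServerLocation carries the relativeUrl for the dataset location. A small sketch pairing the two as they would appear in a copy source; the relative URL is illustrative.

    from azure.mgmt.datafactory.models import HttpReadSettings, HttpServerLocation

    location = HttpServerLocation(relative_url="reports/2021-07.csv")
    read_settings = HttpReadSettings(
        request_method="GET",
        request_timeout="00:02:00",
        disable_metrics_collection=False,  # new flag; false keeps metrics collection on
    )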
- :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -17816,6 +18187,7 @@ class HttpSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -17835,7 +18207,7 @@ class HubspotLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -17845,9 +18217,9 @@ class HubspotLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param client_id: Required. The client ID associated with your Hubspot application. - :type client_id: object + :type client_id: any :param client_secret: The client secret associated with your Hubspot application. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param access_token: The access token obtained when initially authenticating your OAuth @@ -17858,18 +18230,18 @@ class HubspotLinkedService(LinkedService): :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -17917,28 +18289,28 @@ class HubspotObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -17975,27 +18347,30 @@ class HubspotSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -18008,6 +18383,7 @@ class HubspotSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18022,14 +18398,14 @@ def __init__( self.query = kwargs.get('query', None) -class IfConditionActivity(Activity): +class IfConditionActivity(ControlActivity): """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. 
All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -18088,7 +18464,7 @@ class ImpalaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -18098,41 +18474,41 @@ class ImpalaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The IP address or host name of the Impala server. (i.e. 192.168.222.160). - :type host: object + :type host: any :param port: The TCP port that the Impala server uses to listen for client connections. The default value is 21050. - :type port: object + :type port: any :param authentication_type: Required. The authentication type to use. Possible values include: "Anonymous", "SASLUsername", "UsernameAndPassword". :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType :param username: The user name used to access the Impala server. The default value is anonymous when using SASLUsername. - :type username: object + :type username: any :param password: The password corresponding to the user name when using UsernameAndPassword. :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
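The hunk above reparents IfConditionActivity from Activity to ControlActivity, which does not change how the model is constructed. A hedged construction sketch, using WaitActivity (an existing control activity) for the two branches; the names and the expression are illustrative.

    from azure.mgmt.datafactory.models import (
        Expression,
        IfConditionActivity,
        WaitActivity,
    )

    if_activity = IfConditionActivity(
        name="BranchOnFlag",
        expression=Expression(value="@bool(pipeline().parameters.flag)"),
        if_true_activities=[WaitActivity(name="WaitShort", wait_time_in_seconds=5)],
        if_false_activities=[WaitActivity(name="WaitLong", wait_time_in_seconds=60)],
    )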
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -18187,35 +18563,35 @@ class ImpalaObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Impala. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Impala. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -18256,27 +18632,30 @@ class ImpalaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -18289,6 +18668,7 @@ class ImpalaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18310,7 +18690,7 @@ class InformixLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -18320,27 +18700,27 @@ class InformixLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param authentication_type: Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. + :type authentication_type: any + :param credential: The access credential portion of the connection string specified in + driver-specific property-value format. :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -18384,27 +18764,30 @@ class InformixSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. 
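A brief sketch for the InformixLinkedService hunk above: connection_string is required and, per the docstring, carries only the non-access-credential portion of the connection string. The ODBC string and user below are placeholders.

    from azure.mgmt.datafactory.models import InformixLinkedService, SecureString

    informix_ls = InformixLinkedService(
        connection_string="Driver={IBM INFORMIX ODBC DRIVER};Host=ifx01",  # placeholder
        authentication_type="Basic",
        user_name="ifx_user",
        password=SecureString(value="<placeholder>"),
    )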
- :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -18419,6 +18802,7 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -18438,26 +18822,29 @@ class InformixSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -18470,6 +18857,7 @@ class InformixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18491,29 +18879,29 @@ class InformixTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Informix table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -18553,7 +18941,7 @@ class IntegrationRuntime(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -18613,10 +19001,10 @@ class IntegrationRuntimeComputeProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param location: The location for managed integration runtime. The supported regions could be - found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement- - activities. + found on + https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities. :type location: str :param node_size: The node size requirement to managed integration runtime. 
:type node_size: str @@ -18667,7 +19055,7 @@ class IntegrationRuntimeConnectionInfo(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar service_token: The token generated in service. Callers use this token to authenticate to integration runtime. :vartype service_token: str @@ -18747,7 +19135,7 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param compute_type: Compute type of the cluster which will execute data flow job. Possible values include: "General", "MemoryOptimized", "ComputeOptimized". :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType @@ -18920,7 +19308,7 @@ class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar node_name: Name of the integration runtime node. :vartype node_name: str :ivar available_memory_in_mb: Available memory (MB) on the integration runtime node. @@ -18979,6 +19367,93 @@ def __init__( self.received_bytes = None +class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints for one category. + + :param category: The category of outbound network dependency. + :type category: str + :param endpoints: The endpoints for outbound network dependency. + :type endpoints: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] + """ + + _attribute_map = { + 'category': {'key': 'category', 'type': 'str'}, + 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) + self.category = kwargs.get('category', None) + self.endpoints = kwargs.get('endpoints', None) + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): + """The endpoint for Azure-SSIS integration runtime outbound network dependency. + + :param domain_name: The domain name of endpoint. + :type domain_name: str + :param endpoint_details: The details of endpoint. + :type endpoint_details: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] + """ + + _attribute_map = { + 'domain_name': {'key': 'domainName', 'type': 'str'}, + 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) + self.domain_name = kwargs.get('domain_name', None) + self.endpoint_details = kwargs.get('endpoint_details', None) + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): + """The details of Azure-SSIS integration runtime outbound network dependency endpoint. + + :param port: The port of endpoint. 
+ :type port: int + """ + + _attribute_map = { + 'port': {'key': 'port', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) + self.port = kwargs.get('port', None) + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints. + + :param value: The list of outbound network dependency endpoints. + :type value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + class IntegrationRuntimeReference(msrest.serialization.Model): """Integration runtime reference type. @@ -18986,13 +19461,12 @@ class IntegrationRuntimeReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference". + :ivar type: Type of integration runtime. Has constant value: "IntegrationRuntimeReference". :vartype type: str :param reference_name: Required. Reference integration runtime name. :type reference_name: str :param parameters: Arguments for integration runtime. - :type parameters: dict[str, object] + :type parameters: dict[str, any] """ _validation = { @@ -19085,7 +19559,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param catalog_server_endpoint: The catalog database server URL. :type catalog_server_endpoint: str :param catalog_admin_user_name: The administrator user name of catalog database. @@ -19134,7 +19608,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param catalog_info: Catalog information for managed dedicated integration runtime. :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo :param license_type: License type for bringing your own license scenario. Possible values @@ -19156,6 +19630,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] :param package_stores: Package stores for the SSIS Integration Runtime. :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] + :param managed_credential: The user-assigned managed identity reference. 
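The new IntegrationRuntimeOutboundNetworkDependencies* models above nest response -> category -> endpoint -> endpoint details. A hedged traversal sketch: it assumes the regenerated IntegrationRuntimesOperations exposes list_outbound_network_dependencies_endpoints (consistent with the operations files touched by this patch) and that client is an already-constructed DataFactoryManagementClient; the resource names are hypothetical.

    response = client.integration_runtimes.list_outbound_network_dependencies_endpoints(
        resource_group_name="my-rg",
        factory_name="my-factory",
        integration_runtime_name="my-ssis-ir",
    )
    for category in response.value or []:
        for endpoint in category.endpoints or []:
            ports = [detail.port for detail in endpoint.endpoint_details or []]
            print(category.category, endpoint.domain_name, ports)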
+ :type managed_credential: ~azure.mgmt.datafactory.models.EntityReference """ _attribute_map = { @@ -19167,6 +19643,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, + 'managed_credential': {'key': 'managedCredential', 'type': 'EntityReference'}, } def __init__( @@ -19182,6 +19659,7 @@ def __init__( self.edition = kwargs.get('edition', None) self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None) self.package_stores = kwargs.get('package_stores', None) + self.managed_credential = kwargs.get('managed_credential', None) class IntegrationRuntimeStatus(msrest.serialization.Model): @@ -19196,7 +19674,7 @@ class IntegrationRuntimeStatus(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -19302,7 +19780,7 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param v_net_id: The ID of the VNet that this integration runtime will join. :type v_net_id: str :param subnet: The name of the subnet this integration runtime will join. @@ -19310,6 +19788,9 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will use. :type public_i_ps: list[str] + :param subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be + joined. + :type subnet_id: str """ _attribute_map = { @@ -19317,6 +19798,7 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): 'v_net_id': {'key': 'vNetId', 'type': 'str'}, 'subnet': {'key': 'subnet', 'type': 'str'}, 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, + 'subnet_id': {'key': 'subnetId', 'type': 'str'}, } def __init__( @@ -19328,6 +19810,7 @@ def __init__( self.v_net_id = kwargs.get('v_net_id', None) self.subnet = kwargs.get('subnet', None) self.public_i_ps = kwargs.get('public_i_ps', None) + self.subnet_id = kwargs.get('subnet_id', None) class JiraLinkedService(LinkedService): @@ -19337,7 +19820,7 @@ class JiraLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -19347,32 +19830,32 @@ class JiraLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. 
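Two additive fields land in the hunks above: IntegrationRuntimeSsisProperties.managed_credential (an EntityReference) and IntegrationRuntimeVNetProperties.subnet_id. A hedged sketch; the "CredentialReference" entity-reference type and all resource names are assumptions not confirmed by this patch.

    from azure.mgmt.datafactory.models import (
        EntityReference,
        IntegrationRuntimeSsisProperties,
        IntegrationRuntimeVNetProperties,
    )

    ssis_props = IntegrationRuntimeSsisProperties(
        edition="Standard",
        managed_credential=EntityReference(
            type="CredentialReference",  # assumed enum value for credential references
            reference_name="my-user-assigned-credential",
        ),
    )
    vnet_props = IntegrationRuntimeVNetProperties(
        subnet_id=(  # new field: a full ARM resource ID, unlike the bare "subnet" name
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/my-rg"
            "/providers/Microsoft.Network/virtualNetworks/my-vnet/subnets/default"
        ),
    )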
The IP address or host name of the Jira service. (e.g. jira.example.com). - :type host: object + :type host: any :param port: The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :type port: object + :type port: any :param username: Required. The user name that you use to access Jira Service. - :type username: object + :type username: any :param password: The password corresponding to the user name that you provided in the username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -19421,28 +19904,28 @@ class JiraObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -19479,27 +19962,30 @@ class JiraSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -19512,6 +19998,7 @@ class JiraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -19533,23 +20020,23 @@ class JsonDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -19560,7 +20047,7 @@ class JsonDataset(Dataset): of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. 
Type: string (or Expression with resultType string). - :type encoding_name: object + :type encoding_name: any :param compression: The data compression method used for the json dataset. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ @@ -19603,35 +20090,34 @@ class JsonFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any :param file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~azure.mgmt.datafactory.models.JsonFormatFilePattern + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: any :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). - :type nesting_separator: object + :type nesting_separator: any :param encoding_name: The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). - :type encoding_name: object + :type encoding_name: any :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with resultType string). - :type json_node_reference: object + :type json_node_reference: any :param json_path_definition: The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). - :type json_path_definition: object + :type json_path_definition: any """ _validation = { @@ -19643,7 +20129,7 @@ class JsonFormat(DatasetStorageFormat): 'type': {'key': 'type', 'type': 'str'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, @@ -19670,7 +20156,7 @@ class JsonReadSettings(FormatReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. @@ -19703,24 +20189,27 @@ class JsonSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Json format settings. @@ -19739,6 +20228,7 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } @@ -19760,18 +20250,21 @@ class JsonSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Json format settings. @@ -19791,6 +20284,7 @@ class JsonSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -19814,13 +20308,12 @@ class JsonWriteSettings(FormatWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~azure.mgmt.datafactory.models.JsonWriteFilePattern + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: any """ _validation = { @@ -19830,7 +20323,7 @@ class JsonWriteSettings(FormatWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, } def __init__( @@ -20073,12 +20566,12 @@ class LinkedServiceReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference". + :ivar type: Linked service reference type. Has constant value: "LinkedServiceReference". :vartype type: str :param reference_name: Required. Reference LinkedService name. :type reference_name: str :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] + :type parameters: dict[str, any] """ _validation = { @@ -20155,7 +20648,7 @@ class LogLocationSettings(msrest.serialization.Model): :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :type path: object + :type path: any """ _validation = { @@ -20183,7 +20676,7 @@ class LogSettings(msrest.serialization.Model): :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). - :type enable_copy_activity_log: object + :type enable_copy_activity_log: any :param copy_activity_log_settings: Specifies settings for copy activity log. :type copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings :param log_location_settings: Required. 
Log location settings customer needs to provide when @@ -20218,18 +20711,18 @@ class LogStorageSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param linked_service_name: Required. Log storage linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :type path: object + :type path: any :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :type log_level: object + :type log_level: any :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). - :type enable_reliable_logging: object + :type enable_reliable_logging: any """ _validation = { @@ -20263,7 +20756,7 @@ class LookupActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -20284,7 +20777,7 @@ class LookupActivity(ExecutionActivity): :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param first_row_only: Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). - :type first_row_only: object + :type first_row_only: any """ _validation = { @@ -20326,7 +20819,7 @@ class MagentoLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -20336,25 +20829,25 @@ class MagentoLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :type host: object + :type host: any :param access_token: The access token from Magento. :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
- :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -20398,28 +20891,28 @@ class MagentoObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -20456,27 +20949,30 @@ class MagentoSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -20489,6 +20985,7 @@ class MagentoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -20512,7 +21009,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -20564,7 +21061,7 @@ class ManagedIntegrationRuntimeError(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar time: The time when the error occurred. :vartype time: ~datetime.datetime :ivar code: Error code. @@ -20609,7 +21106,7 @@ class ManagedIntegrationRuntimeNode(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar node_id: The managed integration runtime node id. :vartype node_id: str :ivar status: The managed integration runtime node status. Possible values include: "Starting", @@ -20649,7 +21146,7 @@ class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar type: The operation type. Could be start or stop. :vartype type: str :ivar start_time: The start time of the operation. @@ -20706,7 +21203,7 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -20767,7 +21264,7 @@ class ManagedPrivateEndpoint(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param connection_state: The managed private endpoint connection state. :type connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties :param fqdns: Fully qualified domain names. 
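The disable_metrics_collection property that this patch threads through every copy source and sink surfaces as an ordinary keyword argument on the regenerated models. A minimal sketch of setting it on one of the sources above, assuming the package's usual public import path (the query value is illustrative only):

    from azure.mgmt.datafactory.models import MagentoSource

    # The property is serialized as 'object' on the wire, so a literal boolean
    # or an ADF expression object are both accepted.
    source = MagentoSource(
        query="SELECT * FROM sales_order",  # hypothetical query
        disable_metrics_collection=True,
    )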
@@ -20891,7 +21388,7 @@ class ManagedVirtualNetwork(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar v_net_id: Managed Virtual Network ID. :vartype v_net_id: str :ivar alias: Managed Virtual Network alias. @@ -20955,7 +21452,7 @@ class ManagedVirtualNetworkReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Managed Virtual Network reference type. Default value: + :ivar type: Managed Virtual Network reference type. Has constant value: "ManagedVirtualNetworkReference". :vartype type: str :param reference_name: Required. Reference ManagedVirtualNetwork name. @@ -21028,12 +21525,14 @@ def __init__( class MappingDataFlow(DataFlow): """Mapping data flow. - :param type: Type of data flow.Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. :type type: str :param description: The description of the data flow. :type description: str :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder @@ -21047,6 +21546,10 @@ class MappingDataFlow(DataFlow): :type script: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, @@ -21077,7 +21580,7 @@ class MariaDBLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -21087,16 +21590,16 @@ class MariaDBLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -21133,27 +21636,30 @@ class MariaDBSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. 
:type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -21166,6 +21672,7 @@ class MariaDBSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -21187,28 +21694,28 @@ class MariaDBTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -21245,7 +21752,7 @@ class MarketoLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -21255,27 +21762,27 @@ class MarketoLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). - :type endpoint: object + :type endpoint: any :param client_id: Required. The client Id of your Marketo service. - :type client_id: object + :type client_id: any :param client_secret: The client secret of your Marketo service. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -21322,28 +21829,28 @@ class MarketoObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -21380,27 +21887,30 @@ class MarketoSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -21413,6 +21923,7 @@ class MarketoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -21427,6 +21938,29 @@ def __init__( self.query = kwargs.get('query', None) +class MetadataItem(msrest.serialization.Model): + """Specify the name and value of a custom metadata item. + + :param name: Metadata item key name. Type: string (or Expression with resultType string). + :type name: any + :param value: Metadata item value. Type: string (or Expression with resultType string). + :type value: any + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MetadataItem, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. @@ -21434,7 +21968,7 @@ class MicrosoftAccessLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection.
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -21444,27 +21978,27 @@ class MicrosoftAccessLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param authentication_type: Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. + :type authentication_type: any + :param credential: The access credential portion of the connection string specified in + driver-specific property-value format. :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -21508,27 +22042,30 @@ class MicrosoftAccessSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -21543,6 +22080,7 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -21562,20 +22100,23 @@ class MicrosoftAccessSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -21591,6 +22132,7 @@ class MicrosoftAccessSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -21612,29 +22154,29 @@ class MicrosoftAccessTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -21671,29 +22213,29 @@ class MongoDbAtlasCollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the MongoDB Atlas database. Type: string (or Expression with resultType string). - :type collection: object + :type collection: any """ _validation = { @@ -21731,7 +22273,7 @@ class MongoDbAtlasLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -21741,14 +22283,14 @@ class MongoDbAtlasLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param database: Required. The name of the MongoDB Atlas database that you want to access. 
Type: string (or Expression with resultType string). - :type database: object + :type database: any """ _validation = { @@ -21778,6 +22320,65 @@ def __init__( self.database = kwargs['database'] + +class MongoDbAtlasSink(CopySink): + """A copy activity MongoDB Atlas sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: any + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: any + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: any + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: any + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any + :param write_behavior: Specifies whether the document with the same key is overwritten + (upsert) rather than an exception being thrown (insert). The default value is "insert". Type: + string (or Expression with resultType string). + :type write_behavior: any + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasSink, self).__init__(**kwargs) + self.type = 'MongoDbAtlasSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + + class MongoDbAtlasSource(CopySource): """A copy activity source for a MongoDB Atlas database. @@ -21785,32 +22386,35 @@ class MongoDbAtlasSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait.
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: object + :type filter: any :param cursor_methods: Cursor methods for Mongodb query. :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :type batch_size: object + :type batch_size: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -21826,6 +22430,7 @@ class MongoDbAtlasSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -21853,29 +22458,29 @@ class MongoDbCollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. The table name of the MongoDB database. Type: string (or Expression with resultType string). - :type collection_name: object + :type collection_name: any """ _validation = { @@ -21911,22 +22516,22 @@ class MongoDbCursorMethodsProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param project: Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). - :type project: object + :type project: any :param sort: Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :type sort: object + :type sort: any :param skip: Specifies the how many documents skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). - :type skip: object + :type skip: any :param limit: Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). - :type limit: object + :type limit: any """ _attribute_map = { @@ -21956,7 +22561,7 @@ class MongoDbLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -21966,37 +22571,37 @@ class MongoDbLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Required. The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: "Basic", "Anonymous". :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType :param database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). - :type database_name: object + :type database_name: any :param username: Username for authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. 
:type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_source: Database to verify the username and password. Type: string (or Expression with resultType string). - :type auth_source: object + :type auth_source: any :param port: The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object + :type enable_ssl: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -22049,21 +22654,24 @@ class MongoDbSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -22079,6 +22687,7 @@ class MongoDbSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -22100,29 +22709,29 @@ class MongoDbV2CollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the MongoDB database. Type: string (or Expression with resultType string). - :type collection: object + :type collection: any """ _validation = { @@ -22160,7 +22769,7 @@ class MongoDbV2LinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -22170,13 +22779,13 @@ class MongoDbV2LinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param database: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). - :type database: object + :type database: any """ _validation = { @@ -22206,6 +22815,65 @@ def __init__( self.database = kwargs['database'] +class MongoDbV2Sink(CopySink): + """A copy activity MongoDB sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: any + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: any + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :type sink_retry_count: any + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: any + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any + :param write_behavior: Specifies whether a document with the same key should be overwritten + (upsert) rather than raising an exception (insert). The default value is "insert". Type: + string (or Expression with resultType string). + :type write_behavior: any + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbV2Sink, self).__init__(**kwargs) + self.type = 'MongoDbV2Sink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + + class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. @@ -22213,32 +22881,35 @@ class MongoDbV2Source(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: object + :type filter: any :param cursor_methods: Cursor methods for the MongoDB query.
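# Illustrative usage sketch (editor-added) of the new MongoDbV2Sink defined
# above: write_behavior selects between "insert" (the default, which raises on
# a key collision) and "upsert" (which overwrites instead). Values are
# placeholders.
from azure.mgmt.datafactory.models import MongoDbV2Sink

mongo_sink = MongoDbV2Sink(
    write_behavior="upsert",         # overwrite documents that share a key
    write_batch_size=1000,
    write_batch_timeout="00:30:00",  # TimeSpan pattern from the docstring
    disable_metrics_collection=False,
)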
:type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from the MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the response size limit. Type: integer (or Expression with resultType integer). - :type batch_size: object + :type batch_size: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -22254,6 +22925,7 @@ class MongoDbV2Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -22281,7 +22953,7 @@ class MySqlLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -22291,15 +22963,15 @@ class MySqlLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -22337,26 +23009,29 @@ class MySqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
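# Illustrative usage sketch (editor-added): a MongoDbV2Source combining the
# selection filter with the cursor methods from the earlier sketch. Assumes
# keyword-argument construction; values are placeholders.
from azure.mgmt.datafactory.models import MongoDbV2Source

mongo_source = MongoDbV2Source(
    filter='{"status": "active"}',  # omit (or pass {}) to read all documents
    cursor_methods=cursor_methods,  # from the MongoDbCursorMethodsProperties sketch
    batch_size=1000,                # documents per response batch
    query_timeout="00:10:00",
)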
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -22369,6 +23044,7 @@ class MySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -22390,28 +23066,28 @@ class MySqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The MySQL table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -22448,7 +23124,7 @@ class NetezzaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. 
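# Illustrative usage sketch (editor-added): a MySqlTableDataset pointing at an
# existing MySQL linked service. The LinkedServiceReference constructor shape
# is assumed (some SDK versions also require type="LinkedServiceReference");
# names are placeholders.
from azure.mgmt.datafactory.models import LinkedServiceReference, MySqlTableDataset

mysql_ds = MySqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="MySqlLinkedService"),
    table_name="orders",
)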
:type type: str :param connect_via: The integration runtime reference. @@ -22458,16 +23134,16 @@ class NetezzaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -22502,15 +23178,15 @@ class NetezzaPartitionSettings(msrest.serialization.Model): :param partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: object + :type partition_lower_bound: any """ _attribute_map = { @@ -22536,30 +23212,33 @@ class NetezzaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param partition_option: The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Netezza source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ @@ -22574,6 +23253,7 @@ class NetezzaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -22599,35 +23279,35 @@ class NetezzaTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Netezza. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -22668,7 +23348,7 @@ class ODataLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
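# Illustrative usage sketch (editor-added): a parallel Netezza read using the
# partition settings defined above. "DynamicRange" splits the read on an
# integer column between the given bounds; values are placeholders.
from azure.mgmt.datafactory.models import NetezzaPartitionSettings, NetezzaSource

netezza_source = NetezzaSource(
    query="SELECT * FROM sales",
    partition_option="DynamicRange",
    partition_settings=NetezzaPartitionSettings(
        partition_column_name="sale_id",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)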
@@ -22678,35 +23358,35 @@ class ODataLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Type of authentication used to connect to the OData service. Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", "ManagedServiceIdentity". :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password of the OData service. :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :type auth_headers: object + :type auth_headers: any :param tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param azure_cloud_type: Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory region's cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param aad_resource_id: Specify the resource you are requesting authorization to use. Type: string (or Expression with resultType string). - :type aad_resource_id: object + :type aad_resource_id: any :param aad_service_principal_credential_type: Specify the credential type (key or cert) used for the service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or @@ -22725,7 +23405,7 @@ class ODataLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -22785,28 +23465,28 @@ class ODataResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset.
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: The OData resource path. Type: string (or Expression with resultType string). - :type path: object + :type path: any """ _validation = { @@ -22843,26 +23523,29 @@ class ODataSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: object + :type query: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -22878,6 +23561,7 @@ class ODataSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -22901,7 +23585,7 @@ class OdbcLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
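# Illustrative usage sketch (editor-added): an ODataSource as documented
# above. The query uses plain OData system query syntax, and
# http_request_timeout follows the TimeSpan pattern from the docstring;
# values are placeholders.
from azure.mgmt.datafactory.models import ODataSource

odata_source = ODataSource(
    query="$top=100",
    http_request_timeout="00:05:00",
    disable_metrics_collection=False,
)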
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -22911,26 +23595,26 @@ class OdbcLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param authentication_type: Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. + :type authentication_type: any + :param credential: The access credential portion of the connection string specified in + driver-specific property-value format. :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -22974,27 +23658,30 @@ class OdbcSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -23009,6 +23696,7 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -23028,26 +23716,29 @@ class OdbcSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -23060,6 +23751,7 @@ class OdbcSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -23081,28 +23773,28 @@ class OdbcTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
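# Illustrative usage sketch (editor-added): an OdbcSink whose pre_copy_script
# runs once against the sink before the copy starts, a common way to clear a
# staging table. Values are placeholders.
from azure.mgmt.datafactory.models import OdbcSink

odbc_sink = OdbcSink(
    pre_copy_script="TRUNCATE TABLE staging_orders",
    write_batch_size=5000,
    write_batch_timeout="00:05:00",
)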
- :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The ODBC table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -23139,32 +23831,32 @@ class Office365Dataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any :param predicate: A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). - :type predicate: object + :type predicate: any """ _validation = { @@ -23204,7 +23896,7 @@ class Office365LinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -23214,22 +23906,22 @@ class Office365LinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[object] + :type annotations: list[any] :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). - :type office365_tenant_id: object + :type office365_tenant_id: any :param service_principal_tenant_id: Required. Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). - :type service_principal_tenant_id: object + :type service_principal_tenant_id: any :param service_principal_id: Required. Specify the application's client ID. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: Required. Specify the application's key. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -23274,37 +23966,40 @@ class Office365Source(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). - :type allowed_groups: object + :type allowed_groups: any :param user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType string). - :type user_scope_filter_uri: object + :type user_scope_filter_uri: any :param date_filter_column: The Column to apply the :code:`` and :code:``. Type: string (or Expression with resultType string). - :type date_filter_column: object + :type date_filter_column: any :param start_time: Start time of the requested range for this dataset. Type: string (or Expression with resultType string). - :type start_time: object + :type start_time: any :param end_time: End time of the requested range for this dataset. Type: string (or Expression with resultType string). - :type end_time: object + :type end_time: any :param output_columns: The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ]. 
- :type output_columns: object + :type output_columns: any """ _validation = { @@ -23317,6 +24012,7 @@ class Office365Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, @@ -23586,7 +24282,7 @@ class OracleCloudStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -23596,10 +24292,10 @@ class OracleCloudStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object + :type access_key_id: any :param secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -23607,11 +24303,11 @@ class OracleCloudStorageLinkedService(LinkedService): Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: object + :type service_url: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -23650,21 +24346,21 @@ class OracleCloudStorageLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any :param version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with resultType string). 
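# Illustrative usage sketch (editor-added): an Office365Source restricted to a
# date window, with output_columns following the example in the docstring
# above. Values are placeholders.
from azure.mgmt.datafactory.models import Office365Source

o365_source = Office365Source(
    date_filter_column="CreatedDateTime",
    start_time="2021-01-01T00:00:00Z",
    end_time="2021-06-30T00:00:00Z",
    output_columns=[{"name": "Id"}, {"name": "CreatedDateTime"}],
)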
- :type version: object + :type version: any """ _validation = { @@ -23697,42 +24393,45 @@ class OracleCloudStorageReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -23743,6 +24442,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -23780,7 +24480,7 @@ class OracleLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -23790,16 +24490,16 @@ class OracleLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -23834,18 +24534,18 @@ class OraclePartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Oracle source partitioning. :param partition_names: Names of the physical partitions of Oracle table. - :type partition_names: object + :type partition_names: any :param partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: object + :type partition_lower_bound: any """ _attribute_map = { @@ -23873,7 +24573,7 @@ class OracleServiceCloudLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. 
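# Illustrative usage sketch (editor-added): OracleCloudStorageReadSettings for
# a recursive wildcard read with partition discovery, per the docstrings
# above. Paths are placeholders.
from azure.mgmt.datafactory.models import OracleCloudStorageReadSettings

read_settings = OracleCloudStorageReadSettings(
    recursive=True,
    wildcard_folder_path="exports/2021/*",
    wildcard_file_name="*.csv",
    enable_partition_discovery=True,
    partition_root_path="exports",
    delete_files_after_completion=False,
)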
:type type: str :param connect_via: The integration runtime reference. @@ -23883,29 +24583,29 @@ class OracleServiceCloudLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: object + :type host: any :param username: Required. The user name that you use to access Oracle Service Cloud server. - :type username: object + :type username: any :param password: Required. The password corresponding to the user name that you provided in the username key. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -23953,28 +24653,28 @@ class OracleServiceCloudObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -24011,27 +24711,30 @@ class OracleServiceCloudSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -24044,6 +24747,7 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -24065,27 +24769,30 @@ class OracleSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
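# Illustrative usage sketch (editor-added): an OracleServiceCloudLinkedService
# with the endpoint-verification flags at their documented defaults. Assumes
# SecureString as the SecretBase implementation; values are placeholders.
from azure.mgmt.datafactory.models import OracleServiceCloudLinkedService, SecureString

osc_ls = OracleServiceCloudLinkedService(
    host="https://contoso.oracleservicecloud.example",  # required
    username="integration_user",                        # required
    password=SecureString(value="<placeholder>"),       # required
    use_encrypted_endpoints=True,
    use_host_verification=True,
    use_peer_verification=True,
)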
- :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -24100,6 +24807,7 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -24119,27 +24827,30 @@ class OracleSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). - :type oracle_reader_query: object + :type oracle_reader_query: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -24157,6 +24868,7 @@ class OracleSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, @@ -24184,35 +24896,35 @@ class OracleTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -24253,30 +24965,31 @@ class OrcDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". - :type orc_compression_codec: str or ~azure.mgmt.datafactory.models.OrcCompressionCodec + :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + resultType string). + :type orc_compression_codec: any """ _validation = { @@ -24295,7 +25008,7 @@ class OrcDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, } def __init__( @@ -24315,13 +25028,13 @@ class OrcFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any """ _validation = { @@ -24350,24 +25063,27 @@ class OrcSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: ORC store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: ORC format settings. @@ -24386,6 +25102,7 @@ class OrcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } @@ -24407,18 +25124,21 @@ class OrcSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -24436,6 +25156,7 @@ class OrcSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -24457,16 +25178,16 @@ class OrcWriteSettings(FormatWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object + :type max_rows_per_file: any :param file_name_prefix: Specifies the file name pattern :code:`<fileNamePrefix>`_:code:`<fileIndex>`.:code:`<fileExtension>` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: object + :type file_name_prefix: any """ _validation = { @@ -24529,7 +25250,7 @@ class ParameterSpecification(msrest.serialization.Model): "Float", "Bool", "Array", "SecureString".
:type type: str or ~azure.mgmt.datafactory.models.ParameterType :param default_value: Default value of parameter. - :type default_value: object + :type default_value: any """ _validation = { @@ -24557,31 +25278,31 @@ class ParquetDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the parquet storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~azure.mgmt.datafactory.models.CompressionCodec + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: any """ _validation = { @@ -24600,7 +25321,7 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( @@ -24620,13 +25341,13 @@ class ParquetFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any """ _validation = { @@ -24655,24 +25376,27 @@ class ParquetSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Parquet format settings. @@ -24691,6 +25415,7 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } @@ -24712,18 +25437,21 @@ class ParquetSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -24741,6 +25469,7 @@ class ParquetSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -24762,16 +25491,16 @@ class ParquetWriteSettings(FormatWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object + :type max_rows_per_file: any :param file_name_prefix: Specifies the file name pattern :code:`<fileNamePrefix>`_:code:`<fileIndex>`.:code:`<fileExtension>` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: object + :type file_name_prefix: any """ _validation = { @@ -24802,7 +25531,7 @@ class PaypalLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -24812,27 +25541,27 @@ :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). - :type host: object + :type host: any :param client_id: Required. The client ID associated with your PayPal application. - :type client_id: object + :type client_id: any :param client_secret: The client secret associated with your PayPal application. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -24879,28 +25608,28 @@ class PaypalObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -24937,27 +25666,30 @@ class PaypalSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -24970,6 +25702,7 @@ class PaypalSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -24991,7 +25724,7 @@ class PhoenixLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -25001,45 +25734,45 @@ class PhoenixLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The IP address or host name of the Phoenix server. (i.e. 192.168.222.160). - :type host: object + :type host: any :param port: The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. - :type port: object + :type port: any :param http_path: The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. - :type http_path: object + :type http_path: any :param authentication_type: Required. The authentication mechanism used to connect to the Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", "WindowsAzureHDInsightService". :type authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType :param username: The user name used to connect to the Phoenix server. - :type username: object + :type username: any :param password: The password corresponding to the user name. :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. 
The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -25096,35 +25829,35 @@ class PhoenixObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Phoenix. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -25165,27 +25898,30 @@ class PhoenixSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -25198,6 +25934,7 @@ class PhoenixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -25216,7 +25953,7 @@ class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): """Pipeline ElapsedTime Metric Policy. :param duration: TimeSpan value, after which an Azure Monitoring Metric is fired. - :type duration: object + :type duration: any """ _attribute_map = { @@ -25305,7 +26042,7 @@ class PipelineReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Pipeline reference type. Default value: "PipelineReference". + :ivar type: Pipeline reference type. Has constant value: "PipelineReference". :vartype type: str :param reference_name: Required. Reference pipeline name. :type reference_name: str @@ -25350,7 +26087,7 @@ class PipelineResource(SubResource): :vartype etag: str :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param description: The description of the pipeline. :type description: str :param activities: List of activities in pipeline. @@ -25362,9 +26099,9 @@ class PipelineResource(SubResource): :param concurrency: The max number of concurrent runs for the pipeline. :type concurrency: int :param annotations: List of tags that can be used for describing the Pipeline. - :type annotations: list[object] + :type annotations: list[any] :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, object] + :type run_dimensions: dict[str, any] :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.PipelineFolder @@ -25421,7 +26158,7 @@ class PipelineRun(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar run_id: Identifier of a run. :vartype run_id: str :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. @@ -25516,18 +26253,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model): :vartype id: str :ivar invoked_by_type: The type of the entity that started the run. :vartype invoked_by_type: str + :ivar pipeline_name: The name of the pipeline that triggered the run, if any. 
+ :vartype pipeline_name: str + :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any. + :vartype pipeline_run_id: str """ _validation = { 'name': {'readonly': True}, 'id': {'readonly': True}, 'invoked_by_type': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, } def __init__( @@ -25538,6 +26283,8 @@ def __init__( self.name = None self.id = None self.invoked_by_type = None + self.pipeline_name = None + self.pipeline_run_id = None class PipelineRunsQueryResponse(msrest.serialization.Model): @@ -25575,20 +26322,20 @@ class PolybaseSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param reject_type: Reject type. Possible values include: "value", "percentage". :type reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType :param reject_value: Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. - :type reject_value: object + :type reject_value: any :param reject_sample_value: Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. - :type reject_sample_value: object + :type reject_sample_value: any :param use_type_default: Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). - :type use_type_default: object + :type use_type_default: any """ _attribute_map = { @@ -25618,7 +26365,7 @@ class PostgreSqlLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -25628,15 +26375,15 @@ class PostgreSqlLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -25674,26 +26421,29 @@ class PostgreSqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -25706,6 +26456,7 @@ class PostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -25727,34 +26478,34 @@ class PostgreSqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The PostgreSQL table name. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -25795,7 +26546,7 @@ class PrestoLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -25805,47 +26556,47 @@ class PrestoLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The IP address or host name of the Presto server. (i.e. 192.168.222.160). - :type host: object + :type host: any :param server_version: Required. The version of the Presto server. (i.e. 0.148-t). - :type server_version: object + :type server_version: any :param catalog: Required. The catalog context for all request against the server. - :type catalog: object + :type catalog: any :param port: The TCP port that the Presto server uses to listen for client connections. The default value is 8080. - :type port: object + :type port: any :param authentication_type: Required. The authentication mechanism used to connect to the Presto server. Possible values include: "Anonymous", "LDAP". :type authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType :param username: The user name used to connect to the Presto server. - :type username: object + :type username: any :param password: The password corresponding to the user name. :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. 
- :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param time_zone_id: The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. - :type time_zone_id: object + :type time_zone_id: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -25908,35 +26659,35 @@ class PrestoObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Presto. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Presto. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -25977,27 +26728,30 @@ class PrestoSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -26010,6 +26764,7 @@ class PrestoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -26024,6 +26779,262 @@ def __init__( self.query = kwargs.get('query', None) + +class PrivateEndpointConnectionListResponse(msrest.serialization.Model): + """A list of private endpoint connection resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of Private Endpoint Connections. + :type value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnectionResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnectionListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class PrivateEndpointConnectionResource(SubResource): + """Private Endpoint Connection ARM resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties.
+ :type properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RemotePrivateEndpointConnection'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnectionResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): + """A request to approve or reject a private endpoint connection. + + :param private_link_service_connection_state: The state of a private link connection. + :type private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ + + _attribute_map = { + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + + +class PrivateLinkConnectionApprovalRequestResource(SubResource): + """Private Endpoint Connection Approval ARM resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. + :type properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkConnectionApprovalRequest'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class PrivateLinkConnectionState(msrest.serialization.Model): + """The state of a private link connection. + + :param status: Status of a private link connection. + :type status: str + :param description: Description of a private link connection. + :type description: str + :param actions_required: ActionsRequired for a private link connection. + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.description = kwargs.get('description', None) + self.actions_required = kwargs.get('actions_required', None) + + +class PrivateLinkResource(SubResource): + """A private link resource. 
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ :param properties: Core resource properties.
+ :type properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'PrivateLinkResourceProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResource, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+class PrivateLinkResourceProperties(msrest.serialization.Model):
+ """Properties of a private link resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar group_id: GroupId of a private link resource.
+ :vartype group_id: str
+ :ivar required_members: RequiredMembers of a private link resource.
+ :vartype required_members: list[str]
+ :ivar required_zone_names: RequiredZoneNames of a private link resource.
+ :vartype required_zone_names: list[str]
+ """
+
+ _validation = {
+ 'group_id': {'readonly': True},
+ 'required_members': {'readonly': True},
+ 'required_zone_names': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'group_id': {'key': 'groupId', 'type': 'str'},
+ 'required_members': {'key': 'requiredMembers', 'type': '[str]'},
+ 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResourceProperties, self).__init__(**kwargs)
+ self.group_id = None
+ self.required_members = None
+ self.required_zone_names = None
+
+
+class PrivateLinkResourcesWrapper(msrest.serialization.Model):
+ """Wrapper for a collection of private link resources.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param value: Required. List of private link resources.
+ :type value: list[~azure.mgmt.datafactory.models.PrivateLinkResource]
+ """
+
+ _validation = {
+ 'value': {'required': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResourcesWrapper, self).__init__(**kwargs)
+ self.value = kwargs['value']
+
+
 class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model):
 """A list of active debug sessions.

@@ -26054,7 +27065,7 @@ class QuickBooksLinkedService(LinkedService):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Type of linked service.Constant filled by server.
 :type type: str
 :param connect_via: The integration runtime reference.
@@ -26064,16 +27075,16 @@ class QuickBooksLinkedService(LinkedService):
 :param parameters: Parameters for linked service.
 :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
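
# --- Illustrative usage (not part of the generated diff) ---
# A minimal sketch of approving a factory private endpoint connection with the
# private link models added above. The model names come from this patch; the
# placeholder resource names and the exact operation exposed on
# `private_endpoint_connection` are assumptions and may differ from the
# generated client surface.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (
    PrivateLinkConnectionApprovalRequest,
    PrivateLinkConnectionApprovalRequestResource,
    PrivateLinkConnectionState,
)

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Wrap the desired connection state in the ARM approval-request resource.
approval = PrivateLinkConnectionApprovalRequestResource(
    properties=PrivateLinkConnectionApprovalRequest(
        private_link_service_connection_state=PrivateLinkConnectionState(
            status="Approved",
            description="Approved by the data platform team.",
        )
    )
)

# Hypothetical call; the generated method name and signature may differ.
client.private_endpoint_connection.create_or_update(
    "<resource-group>", "<factory-name>", "<connection-name>", approval
)
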
- :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). - :type endpoint: object + :type endpoint: any :param company_id: The company ID of the QuickBooks company to authorize. - :type company_id: object + :type company_id: any :param consumer_key: The consumer key for OAuth 1.0 authentication. - :type consumer_key: object + :type consumer_key: any :param consumer_secret: The consumer secret for OAuth 1.0 authentication. :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase :param access_token: The access token for OAuth 1.0 authentication. @@ -26082,11 +27093,11 @@ class QuickBooksLinkedService(LinkedService): :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -26135,28 +27146,28 @@ class QuickBooksObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -26193,27 +27204,30 @@ class QuickBooksSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -26226,6 +27240,7 @@ class QuickBooksSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -26245,7 +27260,7 @@ class RecurrenceSchedule(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param minutes: The minutes. :type minutes: list[int] :param hours: The hours. @@ -26285,7 +27300,7 @@ class RecurrenceScheduleOccurrence(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday". :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek @@ -26316,15 +27331,15 @@ class RedirectIncompatibleRowSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). - :type linked_service_name: object + :type linked_service_name: any :param path: The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). 
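
# --- Illustrative usage (not part of the generated diff) ---
# A minimal sketch of the new disableMetricsCollection flag this patch adds to
# copy sources and sinks. QuickBooksSource stands in for any affected source;
# that the flag is accepted here relies on the shared CopySource base class
# picking it up, which is an assumption based on the serialization key each
# hunk adds. Query values are illustrative.
from azure.mgmt.datafactory.models import QuickBooksSource

source = QuickBooksSource(
    query="SELECT * FROM Invoice",
    query_timeout="02:00:00",
    # Opt this copy source out of data store metrics collection.
    disable_metrics_collection=True,
)
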
- :type path: object + :type path: any """ _validation = { @@ -26358,7 +27373,7 @@ class RedshiftUnloadSettings(msrest.serialization.Model): :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any """ _validation = { @@ -26387,20 +27402,23 @@ class RelationalSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -26416,6 +27434,7 @@ class RelationalSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -26437,29 +27456,29 @@ class RelationalTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the Dataset.
- :type annotations: list[object]
+ :type annotations: list[any]
 :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at
 the root level.
 :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
 :param table_name: The relational table name. Type: string (or Expression with resultType
 string).
- :type table_name: object
+ :type table_name: any
 """

 _validation = {
@@ -26489,6 +27508,40 @@ def __init__(
 self.table_name = kwargs.get('table_name', None)
+
+class RemotePrivateEndpointConnection(msrest.serialization.Model):
+ """A remote private endpoint connection.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar provisioning_state: The provisioning state of the private endpoint connection.
+ :vartype provisioning_state: str
+ :param private_endpoint: PrivateEndpoint of a remote private endpoint connection.
+ :type private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper
+ :param private_link_service_connection_state: The state of a private link connection.
+ :type private_link_service_connection_state:
+ ~azure.mgmt.datafactory.models.PrivateLinkConnectionState
+ """
+
+ _validation = {
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'private_endpoint': {'key': 'privateEndpoint', 'type': 'ArmIdWrapper'},
+ 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RemotePrivateEndpointConnection, self).__init__(**kwargs)
+ self.provisioning_state = None
+ self.private_endpoint = kwargs.get('private_endpoint', None)
+ self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
+
+
 class RerunTumblingWindowTrigger(Trigger):
 """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested
 start time to requested end time.

@@ -26498,7 +27551,7 @@ class RerunTumblingWindowTrigger(Trigger):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Trigger type.Constant filled by server.
 :type type: str
 :param description: Trigger description.
@@ -26507,9 +27560,9 @@ class RerunTumblingWindowTrigger(Trigger):
 called on the Trigger. Possible values include: "Started", "Stopped", "Disabled".
 :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState
 :param annotations: List of tags that can be used for describing the trigger.
- :type annotations: list[object]
+ :type annotations: list[any]
 :param parent_trigger: Required. The parent trigger reference.
- :type parent_trigger: object
+ :type parent_trigger: any
 :param requested_start_time: Required. The start time for the time period for which
 restatement is initiated. Only UTC time is currently supported.
 :type requested_start_time: ~datetime.datetime
@@ -26561,7 +27614,7 @@ class ResponsysLinkedService(LinkedService):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Type of linked service.Constant filled by server.
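
# --- Illustrative usage (not part of the generated diff) ---
# A minimal sketch of a tumbling-window rerun request using the
# RerunTumblingWindowTrigger documented above. parent_trigger is typed `any`,
# so a plain TriggerReference payload is passed. All values are illustrative,
# and rerun_concurrency is a required property of the full model that this
# hunk truncates.
from datetime import datetime, timezone

from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

rerun = RerunTumblingWindowTrigger(
    parent_trigger={"referenceName": "DailyWindowTrigger", "type": "TriggerReference"},
    requested_start_time=datetime(2021, 7, 1, tzinfo=timezone.utc),
    requested_end_time=datetime(2021, 7, 8, tzinfo=timezone.utc),
    rerun_concurrency=1,
)
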
:type type: str :param connect_via: The integration runtime reference. @@ -26571,30 +27624,30 @@ class ResponsysLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: object + :type endpoint: any :param client_id: Required. The client ID associated with the Responsys application. Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -26641,28 +27694,28 @@ class ResponsysObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -26699,27 +27752,30 @@ class ResponsysSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -26732,6 +27788,7 @@ class ResponsysSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -26753,41 +27810,41 @@ class RestResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). - :type relative_url: object + :type relative_url: any :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: object + :type request_method: any :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: object + :type request_body: any :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object + :type additional_headers: any :param pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :type pagination_rules: object + :type pagination_rules: any """ _validation = { @@ -26832,7 +27889,7 @@ class RestServiceLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -26842,43 +27899,43 @@ class RestServiceLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The base URL of the REST service. - :type url: object + :type url: any :param enable_server_certificate_validation: Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object + :type enable_server_certificate_validation: any :param authentication_type: Required. Type of authentication used to connect to the REST service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", "ManagedServiceIdentity". :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType :param user_name: The user name used in Basic authentication type. - :type user_name: object + :type user_name: any :param password: The password used in Basic authentication type. :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :type auth_headers: object + :type auth_headers: any :param service_principal_id: The application's client ID used in AadServicePrincipal authentication type. 
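
# --- Illustrative usage (not part of the generated diff) ---
# A minimal sketch of the RestResourceDataset documented above. The linked
# service name and the pagination expression are illustrative; pagination_rules
# is typed `any`, so a plain dict is passed.
from azure.mgmt.datafactory.models import LinkedServiceReference, RestResourceDataset

dataset = RestResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name="RestServiceLS"),
    relative_url="/v1/orders",
    request_method="GET",
    pagination_rules={"AbsoluteUrl": "$.next_link"},
)
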
- :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The application's key used in AadServicePrincipal authentication type. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param aad_resource_id: The resource you are requesting authorization to use. - :type aad_resource_id: object + :type aad_resource_id: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -26935,40 +27992,43 @@ class RestSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). - :type request_method: object + :type request_method: any :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object + :type additional_headers: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object + :type http_request_timeout: any :param request_interval: The time to await before sending next request, in milliseconds. - :type request_interval: object + :type request_interval: any :param http_compression_type: Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. - :type http_compression_type: object + :type http_compression_type: any """ _validation = { @@ -26983,6 +28043,7 @@ class RestSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, @@ -27010,37 +28071,40 @@ class RestSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: object + :type request_method: any :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: object + :type request_body: any :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object + :type additional_headers: any :param pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :type pagination_rules: object + :type pagination_rules: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any :param request_interval: The time to await before sending next page request. 
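
# --- Illustrative usage (not part of the generated diff) ---
# A minimal sketch of the RestSink defined above, combining the new
# disableMetricsCollection flag with the compression option its docstring
# describes. All values are illustrative.
from azure.mgmt.datafactory.models import RestSink

sink = RestSink(
    request_method="POST",
    http_request_timeout="00:01:40",
    # Per the docstring, Gzip is the only supported compression type; the
    # default (None) sends the payload uncompressed.
    http_compression_type="Gzip",
    disable_metrics_collection=True,
)
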
- :type request_interval: object + :type request_interval: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -27056,6 +28120,7 @@ class RestSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -27085,7 +28150,7 @@ class RetryPolicy(msrest.serialization.Model): :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :type count: object + :type count: any :param interval_in_seconds: Interval between retries in seconds. Default is 30. :type interval_in_seconds: int """ @@ -27236,7 +28301,7 @@ class SalesforceLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -27246,26 +28311,26 @@ class SalesforceLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param environment_url: The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object + :type environment_url: any :param username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :type api_version: object + :type api_version: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -27308,7 +28373,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -27318,31 +28383,31 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param client_id: The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -27387,28 +28452,28 @@ class SalesforceMarketingCloudObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -27445,27 +28510,30 @@ class SalesforceMarketingCloudSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -27478,6 +28546,7 @@ class SalesforceMarketingCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -27499,29 +28568,29 @@ class SalesforceObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). - :type object_api_name: object + :type object_api_name: any """ _validation = { @@ -27558,7 +28627,7 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -27568,29 +28637,29 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param environment_url: The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object + :type environment_url: any :param username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :type api_version: object + :type api_version: any :param extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). - :type extended_properties: object + :type extended_properties: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -27635,29 +28704,29 @@ class SalesforceServiceCloudObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). - :type object_api_name: object + :type object_api_name: any """ _validation = { @@ -27694,37 +28763,40 @@ class SalesforceServiceCloudSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). 
- :type external_id_field_name: object + :type external_id_field_name: any :param ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object + :type ignore_null_values: any """ _validation = { @@ -27739,6 +28811,7 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -27762,20 +28835,23 @@ class SalesforceServiceCloudSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior @@ -27794,6 +28870,7 @@ class SalesforceServiceCloudSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -27817,37 +28894,40 @@ class SalesforceSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :type external_id_field_name: object + :type external_id_field_name: any :param ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object + :type ignore_null_values: any """ _validation = { @@ -27862,6 +28942,7 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -27885,26 +28966,29 @@ class SalesforceSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
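
# --- Illustrative usage (not part of the generated diff) ---
# A minimal sketch of an upsert-mode SalesforceSink per the docstring above:
# write_behavior selects Upsert, external_id_field_name names the external ID
# column used to match records, and ignore_null_values keeps destination
# values unchanged when an input column is NULL. The field name is
# illustrative.
from azure.mgmt.datafactory.models import SalesforceSink

sink = SalesforceSink(
    write_behavior="Upsert",
    external_id_field_name="External_Id__c",
    ignore_null_values=True,
)
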
- :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior @@ -27920,6 +29004,7 @@ class SalesforceSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -27943,23 +29028,23 @@ class SapBwCubeDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -27997,7 +29082,7 @@ class SapBWLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -28007,25 +29092,25 @@ class SapBWLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param system_number: Required. System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: object + :type system_number: any :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param user_name: Username to access the SAP BW server. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to access the SAP BW server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -28071,26 +29156,29 @@ class SapBwSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: MDX query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -28103,6 +29191,7 @@ class SapBwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28124,7 +29213,7 @@ class SapCloudForCustomerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -28134,20 +29223,20 @@ class SapCloudForCustomerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param username: The username for Basic authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: The password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -28187,29 +29276,29 @@ class SapCloudForCustomerResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). - :type path: object + :type path: any """ _validation = { @@ -28247,24 +29336,27 @@ class SapCloudForCustomerSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". :type write_behavior: str or @@ -28273,7 +29365,7 @@ class SapCloudForCustomerSink(CopySink): to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -28288,6 +29380,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -28309,32 +29402,35 @@ class SapCloudForCustomerSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. 
:type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: object + :type query: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -28347,6 +29443,7 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28370,7 +29467,7 @@ class SapEccLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -28380,7 +29477,7 @@ class SapEccLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). @@ -28433,29 +29530,29 @@ class SapEccResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). - :type path: object + :type path: any """ _validation = { @@ -28493,32 +29590,35 @@ class SapEccSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: object + :type query: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -28531,6 +29631,7 @@ class SapEccSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28554,7 +29655,7 @@ class SapHanaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -28564,25 +29665,25 @@ class SapHanaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param server: Host name of the SAP HANA server. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param authentication_type: The authentication type to be used to connect to the SAP HANA server. Possible values include: "Basic", "Windows". :type authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType :param user_name: Username to access the SAP HANA server. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to access the SAP HANA server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -28623,7 +29724,7 @@ class SapHanaPartitionSettings(msrest.serialization.Model): :param partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any """ _attribute_map = { @@ -28645,32 +29746,35 @@ class SapHanaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression with resultType integer). - :type packet_size: object + :type packet_size: any :param partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for SAP HANA source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings @@ -28686,6 +29790,7 @@ class SapHanaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28713,31 +29818,31 @@ class SapHanaTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of SAP HANA. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -28776,7 +29881,7 @@ class SapOpenHubLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -28786,43 +29891,43 @@ class SapOpenHubLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param system_number: System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: object + :type system_number: any :param client_id: Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). - :type language: object + :type language: any :param system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :type system_id: object + :type system_id: any :param user_name: Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to access the SAP BW server where the open hub destination is located. :type password: ~azure.mgmt.datafactory.models.SecretBase :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :type message_server: object + :type message_server: any :param message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :type message_server_service: object + :type message_server_service: any :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :type logon_group: object + :type logon_group: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -28875,38 +29980,41 @@ class SapOpenHubSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: object + :type exclude_last_request: any :param base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :type base_request_id: object + :type base_request_id: any :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object + :type custom_rfc_read_table_function_module: any :param sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). 
- :type sap_data_column_delimiter: object + :type sap_data_column_delimiter: any """ _validation = { @@ -28919,6 +30027,7 @@ class SapOpenHubSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, @@ -28946,36 +30055,36 @@ class SapOpenHubTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param open_hub_destination_name: Required. The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). - :type open_hub_destination_name: object + :type open_hub_destination_name: any :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: object + :type exclude_last_request: any :param base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :type base_request_id: object + :type base_request_id: any """ _validation = { @@ -29017,7 +30126,7 @@ class SapTableLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -29027,57 +30136,57 @@ class SapTableLinkedService(LinkedService): :param parameters: Parameters for linked service. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param system_number: System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: object + :type system_number: any :param client_id: Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param language: Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). - :type language: object + :type language: any :param system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :type system_id: object + :type system_id: any :param user_name: Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to access the SAP server where the table is located. :type password: ~azure.mgmt.datafactory.models.SecretBase :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :type message_server: object + :type message_server: any :param message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :type message_server_service: object + :type message_server_service: any :param snc_mode: SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :type snc_mode: object + :type snc_mode: any :param snc_my_name: Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_my_name: object + :type snc_my_name: any :param snc_partner_name: Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_partner_name: object + :type snc_partner_name: any :param snc_library_path: External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_library_path: object + :type snc_library_path: any :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). - :type snc_qop: object + :type snc_qop: any :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :type logon_group: object + :type logon_group: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -29138,18 +30247,18 @@ class SapTablePartitionSettings(msrest.serialization.Model): :param partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: object + :type partition_lower_bound: any :param max_partitions_number: The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). - :type max_partitions_number: object + :type max_partitions_number: any """ _attribute_map = { @@ -29177,29 +30286,29 @@ class SapTableResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. The name of the SAP Table. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -29237,50 +30346,53 @@ class SapTableSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). - :type row_count: object + :type row_count: any :param row_skips: The number of rows that will be skipped. Type: integer (or Expression with resultType integer). - :type row_skips: object + :type row_skips: any :param rfc_table_fields: The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). - :type rfc_table_fields: object + :type rfc_table_fields: any :param rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). - :type rfc_table_options: object + :type rfc_table_options: any :param batch_size: Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). - :type batch_size: object + :type batch_size: any :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object + :type custom_rfc_read_table_function_module: any :param sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :type sap_data_column_delimiter: object + :type sap_data_column_delimiter: any :param partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for SAP table source partitioning. 
:type partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings @@ -29296,6 +30408,7 @@ class SapTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, @@ -29335,7 +30448,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -29344,7 +30457,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param recurrence: Required. Recurrence schedule configuration. @@ -29381,7 +30494,7 @@ class ScheduleTriggerRecurrence(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", "Day", "Week", "Month", "Year". :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency @@ -29430,9 +30543,8 @@ class ScriptAction(msrest.serialization.Model): :type name: str :param uri: Required. The URI for the script action. :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~azure.mgmt.datafactory.models.HdiNodeTypes + :param roles: Required. The node types on which the script action should be executed. + :type roles: str :param parameters: The parameters for the script action. :type parameters: str """ @@ -29535,7 +30647,7 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -29572,7 +30684,7 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar node_name: Name of the integration runtime node. 
:vartype node_name: str :ivar machine_name: Machine name of the integration runtime node. @@ -29696,7 +30808,7 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -29822,7 +30934,7 @@ class ServiceNowLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -29832,37 +30944,37 @@ class ServiceNowLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. :code:``.service-now.com). - :type endpoint: object + :type endpoint: any :param authentication_type: Required. The authentication type to use. Possible values include: "Basic", "OAuth2". :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. - :type username: object + :type username: any :param password: The password corresponding to the user name for Basic and OAuth2 authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id for OAuth2 authentication. - :type client_id: object + :type client_id: any :param client_secret: The client secret for OAuth2 authentication. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -29915,28 +31027,28 @@ class ServiceNowObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. 
Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -29973,27 +31085,30 @@ class ServiceNowSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -30006,6 +31121,7 @@ class ServiceNowSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -30020,14 +31136,14 @@ def __init__( self.query = kwargs.get('query', None) -class SetVariableActivity(Activity): +class SetVariableActivity(ControlActivity): """Set value for a Variable. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -30041,7 +31157,7 @@ class SetVariableActivity(Activity): :param variable_name: Name of the variable whose value needs to be set. :type variable_name: str :param value: Value to be set. Could be a static value or Expression. - :type value: object + :type value: any """ _validation = { @@ -30077,15 +31193,15 @@ class SftpLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -30114,39 +31230,42 @@ class SftpReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). 
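[Review sketch, not part of the generated diff] The re-parenting of SetVariableActivity from Activity to ControlActivity leaves its constructor surface unchanged, so existing callers are unaffected; a minimal usage sketch with hypothetical names:

    from azure.mgmt.datafactory.models import SetVariableActivity

    # 'type' ("SetVariable") is a class constant filled in by the model itself.
    set_var = SetVariableActivity(
        name="SetRunDate",        # hypothetical activity name
        variable_name="runDate",  # hypothetical pipeline variable
        value="@utcnow()",        # a static value or an Expression
    )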
- :type wildcard_file_name: object + :type wildcard_file_name: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -30157,6 +31276,7 @@ class SftpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -30192,7 +31312,7 @@ class SftpServerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -30202,30 +31322,30 @@ class SftpServerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The SFTP server host name. Type: string (or Expression with resultType string). - :type host: object + :type host: any :param port: The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "SshPublicKey", "MultiFactor". :type authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType :param user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to logon the SFTP server for Basic authentication. 
:type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). - :type private_key_path: object + :type private_key_path: any :param private_key_content: Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. @@ -30235,11 +31355,11 @@ class SftpServerLinkedService(LinkedService): :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). - :type skip_host_key_validation: object + :type skip_host_key_validation: any :param host_key_fingerprint: The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). - :type host_key_fingerprint: object + :type host_key_fingerprint: any """ _validation = { @@ -30293,21 +31413,24 @@ class SftpWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). - :type operation_timeout: object + :type operation_timeout: any :param use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). 
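[Review sketch, not part of the generated diff] The new disableMetricsCollection flag lands on the SFTP store settings as well; a minimal sketch of SftpWriteSettings with illustrative values:

    from azure.mgmt.datafactory.models import SftpWriteSettings

    write_settings = SftpWriteSettings(
        operation_timeout="01:00:00",     # per-chunk write timeout (documented default: one hour)
        use_temp_file_rename=True,        # turn off if the server lacks rename support
        disable_metrics_collection=True,  # added by this patch; service default is false
    )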
- :type use_temp_file_rename: object + :type use_temp_file_rename: any """ _validation = { @@ -30318,6 +31441,7 @@ class SftpWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, @@ -30340,7 +31464,7 @@ class SharePointOnlineListLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -30350,26 +31474,26 @@ class SharePointOnlineListLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param site_url: Required. The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). - :type site_url: object + :type site_url: any :param tenant_id: Required. The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview page. Type: string (or Expression with resultType string). - :type tenant_id: object + :type tenant_id: any :param service_principal_id: Required. The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: Required. The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -30414,29 +31538,29 @@ class SharePointOnlineListResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param list_name: The name of the SharePoint Online list. Type: string (or Expression with resultType string). - :type list_name: object + :type list_name: any """ _validation = { @@ -30473,25 +31597,28 @@ class SharePointOnlineListSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: object + :type query: any :param http_request_timeout: The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -30504,6 +31631,7 @@ class SharePointOnlineListSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -30525,7 +31653,7 @@ class ShopifyLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -30535,26 +31663,26 @@ class ShopifyLinkedService(LinkedService): :param parameters: Parameters for linked service. 
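[Review sketch, not part of the generated diff] SharePointOnlineListSource picks up the same flag alongside its existing OData query support; the query value below is hypothetical:

    from azure.mgmt.datafactory.models import SharePointOnlineListSource

    sp_source = SharePointOnlineListSource(
        query="$top=100",                  # hypothetical OData filter
        http_request_timeout="00:05:00",   # documented default wait time
        disable_metrics_collection=False,  # added by this patch
    )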
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). - :type host: object + :type host: any :param access_token: The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -30598,28 +31726,28 @@ class ShopifyObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -30656,27 +31784,30 @@ class ShopifySource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
- :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -30689,6 +31820,7 @@ class ShopifySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -30708,10 +31840,10 @@ class SkipErrorFile(msrest.serialization.Model): :param file_missing: Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). - :type file_missing: object + :type file_missing: any :param data_inconsistency: Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). - :type data_inconsistency: object + :type data_inconsistency: any """ _attribute_map = { @@ -30735,32 +31867,32 @@ class SnowflakeDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param schema_type_properties_schema: The schema name of the Snowflake database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the Snowflake database. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -30799,19 +31931,19 @@ class SnowflakeExportCopyCommand(ExportSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The export setting type.Constant filled by server. :type type: str :param additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :type additional_copy_options: dict[str, object] + :type additional_copy_options: dict[str, any] :param additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. - :type additional_format_options: dict[str, object] + :type additional_format_options: dict[str, any] """ _validation = { @@ -30842,19 +31974,19 @@ class SnowflakeImportCopyCommand(ImportSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The import setting type.Constant filled by server. :type type: str :param additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :type additional_copy_options: dict[str, object] + :type additional_copy_options: dict[str, any] :param additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. - :type additional_format_options: dict[str, object] + :type additional_format_options: dict[str, any] """ _validation = { @@ -30885,7 +32017,7 @@ class SnowflakeLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
@@ -30895,16 +32027,16 @@ class SnowflakeLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string of snowflake. Type: string, SecureString. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -30942,27 +32074,30 @@ class SnowflakeSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param import_settings: Snowflake import settings. :type import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand """ @@ -30979,6 +32114,7 @@ class SnowflakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, } @@ -31000,20 +32136,23 @@ class SnowflakeSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Snowflake Sql query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param export_settings: Snowflake export settings. :type export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand """ @@ -31028,6 +32167,7 @@ class SnowflakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } @@ -31049,7 +32189,7 @@ class SparkLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -31059,12 +32199,12 @@ class SparkLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. IP address or host name of the Spark server. - :type host: object + :type host: any :param port: Required. The TCP port that the Spark server uses to listen for client connections. - :type port: object + :type port: any :param server_type: The type of Spark server. Possible values include: "SharkServer", "SharkServer2", "SparkThriftServer". :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType @@ -31077,32 +32217,32 @@ class SparkLinkedService(LinkedService): "WindowsAzureHDInsightService". :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType :param username: The user name that you use to access Spark Server. - :type username: object + :type username: any :param password: The password corresponding to the user name that you provided in the Username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Spark server. 
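[Review sketch, not part of the generated diff] A sketch pairing SnowflakeSource with SnowflakeExportCopyCommand, reusing the option examples from the docstrings above; the query is hypothetical:

    from azure.mgmt.datafactory.models import (
        SnowflakeExportCopyCommand,
        SnowflakeSource,
    )

    export = SnowflakeExportCopyCommand(
        additional_copy_options={"DATE_FORMAT": "MM/DD/YYYY"},  # values must be strings
        additional_format_options={"OVERWRITE": "TRUE"},
    )
    source = SnowflakeSource(
        query="SELECT * FROM PUBLIC.ORDERS",  # hypothetical Snowflake SQL query
        export_settings=export,
        disable_metrics_collection=False,     # added by this patch
    )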
- :type http_path: object + :type http_path: any :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -31164,34 +32304,34 @@ class SparkObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Spark. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression with resultType string). 
- :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -31232,27 +32372,30 @@ class SparkSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -31265,6 +32408,7 @@ class SparkSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -31279,6 +32423,44 @@ def __init__( self.query = kwargs.get('query', None) +class SqlAlwaysEncryptedProperties(msrest.serialization.Model): + """Sql always encrypted properties. + + All required parameters must be populated in order to send to Azure. + + :param always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication type. + Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipal", "ManagedIdentity". + :type always_encrypted_akv_auth_type: str or + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Azure Key Vault authentication. Type: string (or Expression with resultType string). + :type service_principal_id: any + :param service_principal_key: The key of the service principal used to authenticate against + Azure Key Vault. 
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'always_encrypted_akv_auth_type': {'required': True}, + } + + _attribute_map = { + 'always_encrypted_akv_auth_type': {'key': 'alwaysEncryptedAkvAuthType', 'type': 'str'}, + 'service_principal_id': {'key': 'servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'servicePrincipalKey', 'type': 'SecretBase'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) + self.always_encrypted_akv_auth_type = kwargs['always_encrypted_akv_auth_type'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + + class SqlDWSink(CopySink): """A copy activity SQL Data Warehouse sink. @@ -31286,41 +32468,44 @@ class SqlDWSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). - :type allow_poly_base: object + :type allow_poly_base: any :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). - :type allow_copy_command: object + :type allow_copy_command: any :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. :type copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
- :type table_option: object + :type table_option: any """ _validation = { @@ -31335,6 +32520,7 @@ class SqlDWSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, @@ -31364,38 +32550,41 @@ class SqlDWSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. - :type stored_procedure_parameters: object + :type stored_procedure_parameters: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
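[Review sketch, not part of the generated diff] A sketch of SqlDWSink taking the Copy Command path; DWCopyCommandSettings is left at its defaults, and the table option uses the only documented value:

    from azure.mgmt.datafactory.models import DWCopyCommandSettings, SqlDWSink

    dw_sink = SqlDWSink(
        allow_copy_command=True,
        copy_command_settings=DWCopyCommandSettings(),  # optional; defaults shown
        table_option="autoCreate",         # only 'autoCreate' is supported today
        disable_metrics_collection=False,  # added by this patch
    )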
:type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ @@ -31410,6 +32599,7 @@ class SqlDWSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -31439,42 +32629,45 @@ class SqlMISink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object + :type sql_writer_stored_procedure_name: any :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type sql_writer_table_type: object + :type sql_writer_table_type: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object + :type stored_procedure_table_type_parameter_name: any :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
- :type table_option: object + :type table_option: any """ _validation = { @@ -31489,6 +32682,7 @@ class SqlMISink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -31518,39 +32712,42 @@ class SqlMISource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object + :type produce_additional_types: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
:type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ @@ -31565,6 +32762,7 @@ class SqlMISource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -31593,19 +32791,20 @@ class SqlPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. :param partition_column_name: The name of the column in integer or datetime type that will be - used for proceeding partitioning. If not specified, the primary key of the table is auto- - detected and used as the partition column. Type: string (or Expression with resultType string). - :type partition_column_name: object + used for proceeding partitioning. If not specified, the primary key of the table is + auto-detected and used as the partition column. Type: string (or Expression with resultType + string). + :type partition_column_name: any :param partition_upper_bound: The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :type partition_lower_bound: object + :type partition_lower_bound: any """ _attribute_map = { @@ -31631,7 +32830,7 @@ class SqlServerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -31641,19 +32840,21 @@ class SqlServerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: The on-premises Windows authentication password. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -31672,6 +32873,7 @@ class SqlServerLinkedService(LinkedService): 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( @@ -31684,6 +32886,7 @@ def __init__( self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) class SqlServerSink(CopySink): @@ -31693,42 +32896,45 @@ class SqlServerSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object + :type sql_writer_stored_procedure_name: any :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type sql_writer_table_type: object + :type sql_writer_table_type: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). 
- :type stored_procedure_table_type_parameter_name: object + :type stored_procedure_table_type_parameter_name: any :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object + :type table_option: any """ _validation = { @@ -31743,6 +32949,7 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -31772,39 +32979,42 @@ class SqlServerSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object + :type produce_additional_types: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. 
Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Sql source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ @@ -31819,6 +33029,7 @@ class SqlServerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -31850,7 +33061,7 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -31867,7 +33078,7 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with resultType string). - :type stored_procedure_name: object + :type stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, @@ -31910,35 +33121,35 @@ class SqlServerTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). 
- :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -31979,42 +33190,45 @@ class SqlSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object + :type sql_writer_stored_procedure_name: any :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type sql_writer_table_type: object + :type sql_writer_table_type: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object + :type stored_procedure_table_type_parameter_name: any :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
- :type table_option: object + :type table_option: any """ _validation = { @@ -32029,6 +33243,7 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -32058,30 +33273,33 @@ class SqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, @@ -32089,10 +33307,10 @@ class SqlSource(TabularSource): :param isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). - :type isolation_level: object + :type isolation_level: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". 
- :type partition_option: object
+ :type partition_option: any
 :param partition_settings: The settings that will be leveraged for Sql source partitioning.
 :type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings
 """

@@ -32107,6 +33325,7 @@ class SqlSource(TabularSource):
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -32138,7 +33357,7 @@ class SquareLinkedService(LinkedService):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Type of linked service.Constant filled by server.
 :type type: str
 :param connect_via: The integration runtime reference.
 :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
 :param description: Linked service description.
 :type description: str
 :param parameters: Parameters for linked service.
 :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
- :type annotations: list[object]
+ :type annotations: list[any]
 :param connection_properties: Properties used to connect to Square. It is mutually exclusive
 with any other properties in the linked service. Type: object.
- :type connection_properties: object
+ :type connection_properties: any
 :param host: The URL of the Square instance. (e.g. mystore.mysquare.com).
- :type host: object
+ :type host: any
 :param client_id: The client ID associated with your Square application.
- :type client_id: object
+ :type client_id: any
 :param client_secret: The client secret associated with your Square application.
 :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
 :param redirect_uri: The redirect URL assigned in the Square application dashboard. (e.g.
 http://localhost:2500).
- :type redirect_uri: object
+ :type redirect_uri: any
 :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted
 using HTTPS. The default value is true.
- :type use_encrypted_endpoints: object
+ :type use_encrypted_endpoints: any
 :param use_host_verification: Specifies whether to require the host name in the server's
 certificate to match the host name of the server when connecting over SSL. The default value
 is true.
- :type use_host_verification: object
+ :type use_host_verification: any
 :param use_peer_verification: Specifies whether to verify the identity of the server when
 connecting over SSL. The default value is true.
- :type use_peer_verification: object
+ :type use_peer_verification: any
 :param encrypted_credential: The encrypted credential used for authentication. Credentials
 are encrypted using the integration runtime credential manager. Type: string (or Expression
 with resultType string).
- :type encrypted_credential: object
+ :type encrypted_credential: any
 """

 _validation = {
@@ -32223,28 +33442,28 @@ class SquareObjectDataset(Dataset):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -32281,27 +33500,30 @@ class SquareSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object
+ :type query: any
 """

 _validation = {
@@ -32314,6 +33536,7 @@
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
 'query': {'key': 'query', 'type': 'object'},
@@ -32334,9 +33557,9 @@ class SSISAccessCredential(msrest.serialization.Model):

 All required parameters must be populated in order to send to Azure.

 :param domain: Required. Domain for windows authentication.
- :type domain: object
+ :type domain: any
 :param user_name: Required. UserName for windows authentication.
- :type user_name: object
+ :type user_name: any
 :param password: Required. Password for windows authentication.
 :type password: ~azure.mgmt.datafactory.models.SecretBase
 """
@@ -32370,12 +33593,12 @@ class SSISChildPackage(msrest.serialization.Model):

 :param package_path: Required. Path for embedded child package. Type: string (or Expression
 with resultType string).
- :type package_path: object
+ :type package_path: any
 :param package_name: Name for embedded child package.
 :type package_name: str
 :param package_content: Required. Content for embedded child package. Type: string (or
 Expression with resultType string).
- :type package_content: object
+ :type package_content: any
 :param package_last_modified_date: Last modified date for embedded child package.
 :type package_last_modified_date: str
 """
@@ -32528,9 +33751,9 @@ class SSISExecutionCredential(msrest.serialization.Model):

 All required parameters must be populated in order to send to Azure.

 :param domain: Required. Domain for windows authentication.
- :type domain: object
+ :type domain: any
 :param user_name: Required. UserName for windows authentication.
- :type user_name: object
+ :type user_name: any
 :param password: Required. Password for windows authentication.
 :type password: ~azure.mgmt.datafactory.models.SecureString
 """
@@ -32564,7 +33787,7 @@ class SSISExecutionParameter(msrest.serialization.Model):

 :param value: Required. SSIS package execution parameter value. Type: string (or Expression
 with resultType string).
- :type value: object
+ :type value: any
 """

 _validation = {
@@ -32625,7 +33848,7 @@ class SSISLogLocation(msrest.serialization.Model):

 :param log_path: Required. The SSIS package execution log path. Type: string (or Expression
 with resultType string).
- :type log_path: object
+ :type log_path: any
 :param type: Required. The type of SSIS log location. Possible values include: "File".
 :type type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType
 :param access_credential: The package execution log access credential.
 :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential
 :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5
 minutes. Type: string (or Expression with resultType string), pattern:
 ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type log_refresh_interval: object
+ :type log_refresh_interval: any
 """

 _validation = {
@@ -32769,7 +33992,7 @@ class SSISPackageLocation(msrest.serialization.Model):

 :param package_path: The SSIS package path. Type: string (or Expression with resultType
 string).
- :type package_path: object + :type package_path: any :param type: The type of SSIS package location. Possible values include: "SSISDB", "File", "InlinePackage", "PackageStore". :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType @@ -32779,14 +34002,14 @@ class SSISPackageLocation(msrest.serialization.Model): :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :param configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). - :type configuration_path: object + :type configuration_path: any :param configuration_access_credential: The configuration file access credential. :type configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :param package_name: The package name. :type package_name: str :param package_content: The embedded package content. Type: string (or Expression with resultType string). - :type package_content: object + :type package_content: any :param package_last_modified_date: The embedded package last modified date. :type package_last_modified_date: str :param child_packages: The embedded child package list. @@ -32944,7 +34167,7 @@ class SSISPropertyOverride(msrest.serialization.Model): :param value: Required. SSIS package property override value. Type: string (or Expression with resultType string). - :type value: object + :type value: any :param is_sensitive: Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true. :type is_sensitive: bool @@ -33018,15 +34241,15 @@ class StagingSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param linked_service_name: Required. Staging linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing the interim data. Type: string (or Expression with resultType string). - :type path: object + :type path: any :param enable_compression: Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_compression: object + :type enable_compression: any """ _validation = { @@ -33056,7 +34279,7 @@ class StoredProcedureParameter(msrest.serialization.Model): :param value: Stored procedure parameter value. Type: string (or Expression with resultType string). - :type value: object + :type value: any :param type: Stored procedure parameter type. Possible values include: "String", "Int", "Int64", "Decimal", "Guid", "Boolean", "Date". :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType @@ -33076,14 +34299,14 @@ def __init__( self.type = kwargs.get('type', None) -class SwitchActivity(Activity): +class SwitchActivity(ControlActivity): """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. 
Type of activity.Constant filled by server. @@ -33165,7 +34388,7 @@ class SybaseLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -33175,27 +34398,27 @@ class SybaseLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Required. Server name for connection. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param schema: Schema name for connection. Type: string (or Expression with resultType string). - :type schema: object + :type schema: any :param authentication_type: AuthenticationType to be used for connection. Possible values include: "Basic", "Windows". :type authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType :param username: Username for authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -33242,26 +34465,29 @@ class SybaseSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. 
Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -33274,6 +34500,7 @@ class SybaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -33295,28 +34522,28 @@ class SybaseTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Sybase table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -33353,33 +34580,33 @@ class TabularTranslator(CopyTranslator): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy translator type.Constant filled by server. :type type: str :param column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. - :type column_mappings: object + :type column_mappings: any :param schema_mapping: The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. - :type schema_mapping: object + :type schema_mapping: any :param collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. 
Type: object (or Expression with resultType object). - :type collection_reference: object + :type collection_reference: any :param map_complex_values_to_string: Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). - :type map_complex_values_to_string: object + :type map_complex_values_to_string: any :param mappings: Column mappings with logical types. Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). - :type mappings: object + :type mappings: any :param type_conversion: Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with resultType boolean). - :type type_conversion: object + :type type_conversion: any :param type_conversion_settings: Type conversion settings. :type type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings """ @@ -33422,12 +34649,12 @@ class TarGZipReadSettings(CompressionReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The Compression setting type.Constant filled by server. :type type: str :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_compression_file_name_as_folder: object + :type preserve_compression_file_name_as_folder: any """ _validation = { @@ -33456,12 +34683,12 @@ class TarReadSettings(CompressionReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The Compression setting type.Constant filled by server. :type type: str :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_compression_file_name_as_folder: object + :type preserve_compression_file_name_as_folder: any """ _validation = { @@ -33490,7 +34717,7 @@ class TeradataLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -33500,24 +34727,24 @@ class TeradataLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Teradata ODBC connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param server: Server name for connection. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param authentication_type: AuthenticationType to be used for connection. Possible values include: "Basic", "Windows". :type authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType :param username: Username for authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -33558,15 +34785,15 @@ class TeradataPartitionSettings(msrest.serialization.Model): :param partition_column_name: The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: object + :type partition_lower_bound: any """ _attribute_map = { @@ -33592,29 +34819,32 @@ class TeradataSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Teradata query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for teradata source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings @@ -33630,6 +34860,7 @@ class TeradataSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -33655,31 +34886,31 @@ class TeradataTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param database: The database name of Teradata. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param table: The table name of Teradata. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -33718,40 +34949,40 @@ class TextFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any :param column_delimiter: The column delimiter. 
Type: string (or Expression with resultType string).
- :type column_delimiter: object
+ :type column_delimiter: any
 :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string).
- :type row_delimiter: object
+ :type row_delimiter: any
 :param escape_char: The escape character. Type: string (or Expression with resultType string).
- :type escape_char: object
+ :type escape_char: any
 :param quote_char: The quote character. Type: string (or Expression with resultType string).
- :type quote_char: object
+ :type quote_char: any
 :param null_value: The null value string. Type: string (or Expression with resultType string).
- :type null_value: object
+ :type null_value: any
 :param encoding_name: The code page name of the preferred encoding. If missing, the default
 value is "utf-8", unless BOM denotes another Unicode encoding. Refer to the "Name" column of
 the table in the following link to set supported values:
 https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
 resultType string).
- :type encoding_name: object
+ :type encoding_name: any
 :param treat_empty_as_null: Treat empty column values in the text file as null. The default
 value is true. Type: boolean (or Expression with resultType boolean).
- :type treat_empty_as_null: object
+ :type treat_empty_as_null: any
 :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The
 default value is 0. Type: integer (or Expression with resultType integer).
- :type skip_line_count: object
+ :type skip_line_count: any
 :param first_row_as_header: When used as input, treat the first row of data as headers. When
 used as output, write the headers into the output as the first row of data. The default value
 is false. Type: boolean (or Expression with resultType boolean).
- :type first_row_as_header: object
+ :type first_row_as_header: any
 """

 _validation = {
@@ -33888,7 +35119,7 @@ class TriggerPipelineReference(msrest.serialization.Model):

 :param pipeline_reference: Pipeline reference.
 :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference
 :param parameters: Pipeline parameters.
- :type parameters: dict[str, object]
+ :type parameters: dict[str, any]
 """

 _attribute_map = {
@@ -33942,7 +35173,7 @@ class TriggerReference(msrest.serialization.Model):

 All required parameters must be populated in order to send to Azure.

- :ivar type: Required. Trigger reference type. Default value: "TriggerReference".
+ :ivar type: Trigger reference type. Has constant value: "TriggerReference".
 :vartype type: str
 :param reference_name: Required. Reference trigger name.
 :type reference_name: str
@@ -34018,7 +35249,7 @@ class TriggerRun(msrest.serialization.Model):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :ivar trigger_run_id: Trigger run id.
 :vartype trigger_run_id: str
 :ivar trigger_name: Trigger name.
 :vartype trigger_name: str
 :ivar trigger_type: Trigger type.
 :vartype trigger_type: str
 :ivar trigger_run_timestamp: Trigger run start time.
 :vartype trigger_run_timestamp: ~datetime.datetime
 :ivar status: Trigger run status. Possible values include: "Succeeded", "Failed", "Inprogress".
 :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus
 :ivar message: Trigger error message.
 :vartype message: str
 :ivar properties: List of property name and value related to trigger run.
 :vartype properties: dict[str, str]
 :ivar triggered_pipelines: List of pipeline name and run Id triggered by the trigger run.
 :vartype triggered_pipelines: dict[str, str]
 :ivar run_dimension: Run dimension for which trigger was fired.
 :vartype run_dimension: dict[str, str]
 :ivar dependency_status: Status of the upstream pipelines.
- :vartype dependency_status: dict[str, object] + :vartype dependency_status: dict[str, any] """ _validation = { @@ -34157,7 +35388,7 @@ class TumblingWindowTrigger(Trigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -34166,7 +35397,7 @@ class TumblingWindowTrigger(Trigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipeline: Required. Pipeline for which runs are created when an event is fired for trigger window that is ready. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference @@ -34185,7 +35416,7 @@ class TumblingWindowTrigger(Trigger): :param delay: Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: object + :type delay: any :param max_concurrency: Required. The max number of parallel time windows (ready for execution) for which a new run is triggered. :type max_concurrency: int @@ -34286,22 +35517,22 @@ class TypeConversionSettings(msrest.serialization.Model): :param allow_data_truncation: Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). - :type allow_data_truncation: object + :type allow_data_truncation: any :param treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). - :type treat_boolean_as_number: object + :type treat_boolean_as_number: any :param date_time_format: The format for DateTime values. Type: string (or Expression with resultType string). - :type date_time_format: object + :type date_time_format: any :param date_time_offset_format: The format for DateTimeOffset values. Type: string (or Expression with resultType string). - :type date_time_offset_format: object + :type date_time_offset_format: any :param time_span_format: The format for TimeSpan values. Type: string (or Expression with resultType string). - :type time_span_format: object + :type time_span_format: any :param culture: The culture used to convert data from/to string. Type: string (or Expression with resultType string). - :type culture: object + :type culture: any """ _attribute_map = { @@ -34326,14 +35557,14 @@ def __init__( self.culture = kwargs.get('culture', None) -class UntilActivity(Activity): +class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. 
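
[Editor's note, not part of the generated patch: the hunks above make three related changes -- docstring types move from `object` to `any`, copy sources and sinks gain a disableMetricsCollection property, and control-flow activities such as SwitchActivity and UntilActivity are re-parented from Activity onto ControlActivity. A minimal usage sketch against the regenerated models follows; the query text, the date format, and the pipeline parameter name noMetrics are illustrative assumptions, not values taken from this patch.

# Sketch only; assumes the regenerated azure-mgmt-datafactory models
# documented in the hunks above. All literal values are made-up examples.
from azure.mgmt.datafactory.models import (
    SqlServerSource,
    SqlServerSink,
    TabularTranslator,
    TypeConversionSettings,
)

# Properties typed as 'any' accept either a plain literal or a Data Factory
# expression object of the form {"value": "@...", "type": "Expression"}.
source = SqlServerSource(
    sql_reader_query="SELECT * FROM dbo.Customers",
    partition_option="PhysicalPartitionsOfTable",
    disable_metrics_collection=True,  # new property in this regeneration
)

sink = SqlServerSink(
    table_option="autoCreate",
    # Hypothetical pipeline parameter, shown only to illustrate the
    # expression form that an 'any'-typed property can carry.
    disable_metrics_collection={
        "value": "@pipeline().parameters.noMetrics",
        "type": "Expression",
    },
)

# TypeConversionSettings (see the hunk above) plugs into TabularTranslator
# via its type_conversion_settings property.
translator = TabularTranslator(
    type_conversion=True,
    type_conversion_settings=TypeConversionSettings(
        allow_data_truncation=False,
        date_time_format="yyyy-MM-dd HH:mm:ss",
    ),
)

End of editor's note.]
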
@@ -34352,7 +35583,7 @@ class UntilActivity(Activity): Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object + :type timeout: any :param activities: Required. List of activities to execute. :type activities: list[~azure.mgmt.datafactory.models.Activity] """ @@ -34485,7 +35716,7 @@ class UserProperty(msrest.serialization.Model): :type name: str :param value: Required. User property value. Type: string (or Expression with resultType string). - :type value: object + :type value: any """ _validation = { @@ -34507,14 +35738,14 @@ def __init__( self.value = kwargs['value'] -class ValidationActivity(Activity): +class ValidationActivity(ControlActivity): """This activity verifies that an external resource exists. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -34529,17 +35760,17 @@ class ValidationActivity(Activity): it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object + :type timeout: any :param sleep: A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). - :type sleep: object + :type sleep: any :param minimum_size: Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). - :type minimum_size: object + :type minimum_size: any :param child_items: Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). - :type child_items: object + :type child_items: any :param dataset: Required. Validation activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ @@ -34585,7 +35816,7 @@ class VariableSpecification(msrest.serialization.Model): :param type: Required. Variable type. Possible values include: "String", "Bool", "Array". :type type: str or ~azure.mgmt.datafactory.models.VariableType :param default_value: Default value of variable. - :type default_value: object + :type default_value: any """ _validation = { @@ -34613,7 +35844,7 @@ class VerticaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -34623,16 +35854,16 @@ class VerticaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -34669,27 +35900,30 @@ class VerticaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -34702,6 +35936,7 @@ class VerticaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -34723,35 +35958,35 @@ class VerticaTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. 
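# --- Editor's example (not part of the diff): wiring the Vertica linked
# service and copy source documented above, with the password pulled from Key
# Vault and the new disableMetricsCollection flag set. Reference names and the
# connection string are hypothetical.
from azure.mgmt.datafactory.models import (
    VerticaLinkedService, VerticaSource,
    AzureKeyVaultSecretReference, LinkedServiceReference)

vertica_ls = VerticaLinkedService(
    connection_string="Server=vertica01;Port=5433;Database=sales;UID=loader",
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="MyKeyVault"),
        secret_name="vertica-password"),
)

source = VerticaSource(
    query="SELECT * FROM public.orders",
    query_timeout="02:00:00",
    disable_metrics_collection=True,  # new in this version; service default is false
)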
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Vertica. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Vertica. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -34785,14 +36020,14 @@ def __init__( self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class WaitActivity(Activity): +class WaitActivity(ControlActivity): """This activity suspends pipeline execution for the specified interval. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -34804,7 +36039,7 @@ class WaitActivity(Activity): :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param wait_time_in_seconds: Required. Duration in seconds. - :type wait_time_in_seconds: object + :type wait_time_in_seconds: any """ _validation = { @@ -34839,7 +36074,7 @@ class WebActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -34859,14 +36094,14 @@ class WebActivity(ExecutionActivity): :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod :param url: Required. Web activity target endpoint and path. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: object + :type headers: any :param body: Represents the payload that will be sent to the endpoint. 
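# --- Editor's example (not part of the diff): the WaitActivity documented
# above is the simplest control activity; wait_time_in_seconds is its only
# extra required field.
from azure.mgmt.datafactory.models import WaitActivity

pause = WaitActivity(name="CoolDown", wait_time_in_seconds=30)

# Since the property is typed "any", an ADF expression object should also
# work (a sketch, assuming the usual expression JSON shape):
dynamic_pause = WaitActivity(
    name="DynamicCoolDown",
    wait_time_in_seconds={"value": "@pipeline().parameters.waitSeconds",
                          "type": "Expression"},
)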
Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: object + :type body: any :param authentication: Authentication method used for calling the endpoint. :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :param datasets: List of datasets passed to web endpoint. @@ -34932,16 +36167,16 @@ class WebActivityAuthentication(msrest.serialization.Model): :type pfx: ~azure.mgmt.datafactory.models.SecretBase :param username: Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for the PFX file or basic authentication / Secret when used for ServicePrincipal. :type password: ~azure.mgmt.datafactory.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). - :type resource: object + :type resource: any :param user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). - :type user_tenant: object + :type user_tenant: any """ _validation = { @@ -34980,7 +36215,7 @@ class WebLinkedServiceTypeProperties(msrest.serialization.Model): :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". @@ -35017,7 +36252,7 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". @@ -35049,14 +36284,14 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType :param username: Required. User name for Basic authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Required. The password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -35092,7 +36327,7 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. 
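# --- Editor's example (not part of the diff): a sketch of the WebActivity and
# WebActivityAuthentication documented above, calling an endpoint with a
# managed-identity token. The authentication type value "MSI" is assumed from
# the service's documented options; the URL is hypothetical.
from azure.mgmt.datafactory.models import WebActivity, WebActivityAuthentication

call = WebActivity(
    name="NotifyService",
    method="POST",                        # required
    url="https://example.contoso.com/api/notify",
    headers={"Content-Type": "application/json"},
    body='{"status": "done"}',            # required for POST/PUT, not allowed for GET
    authentication=WebActivityAuthentication(
        type="MSI",                       # assumed enum-like string
        resource="https://management.azure.com/"),
)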
Possible values include: "Basic", "Anonymous", "ClientCertificate". @@ -35127,14 +36362,14 @@ def __init__( self.password = kwargs['password'] -class WebHookActivity(Activity): +class WebHookActivity(ControlActivity): """WebHook activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -35149,7 +36384,7 @@ class WebHookActivity(Activity): :type method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param timeout: The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). @@ -35157,17 +36392,17 @@ class WebHookActivity(Activity): :param headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: object + :type headers: any :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: object + :type body: any :param authentication: Authentication method used for calling the endpoint. :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :param report_status_on_call_back: When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). - :type report_status_on_call_back: object + :type report_status_on_call_back: any """ _validation = { @@ -35215,7 +36450,7 @@ class WebLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -35225,7 +36460,7 @@ class WebLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param type_properties: Required. Web linked service properties. :type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ @@ -35261,18 +36496,21 @@ class WebSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. 
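# --- Editor's example (not part of the diff): the WebHookActivity documented
# above. Unlike WebActivity, the run waits for a callback; with
# report_status_on_call_back=True the callback body can fail the activity by
# returning statusCode >= 400. The URL is hypothetical.
from azure.mgmt.datafactory.models import WebHookActivity

hook = WebHookActivity(
    name="AwaitApproval",
    method="POST",                        # required
    url="https://example.contoso.com/api/approval",
    timeout="00:20:00",                   # callback deadline (default 10 minutes)
    report_status_on_call_back=True,
)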
:type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -35288,6 +36526,7 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -35307,32 +36546,32 @@ class WebTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param index: Required. The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. - :type index: object + :type index: any :param path: The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). - :type path: object + :type path: any """ _validation = { @@ -35372,7 +36611,7 @@ class XeroLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
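# --- Editor's example (not part of the diff): the WebTableDataset documented
# above extracts the nth HTML table from a page reached via a Web linked
# service. The linked-service name is hypothetical.
from azure.mgmt.datafactory.models import WebTableDataset, LinkedServiceReference

dataset = WebTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="PublicSiteLS"),
    index=0,                 # required: zero-based table index on the page
    path="statistics/2021",  # relative URL under the linked service URL
)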
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -35382,12 +36621,12 @@ class XeroLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param host: The endpoint of the Xero server. (i.e. api.xero.com). - :type host: object + :type host: any :param consumer_key: The consumer key associated with the Xero application. :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase :param private_key: The private key from the .pem file that was generated for your Xero private @@ -35396,18 +36635,18 @@ class XeroLinkedService(LinkedService): :type private_key: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -35454,28 +36693,28 @@ class XeroObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. 
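# --- Editor's example (not part of the diff): a hedged sketch of the
# XeroLinkedService documented above, using private-application credentials.
# SecureString is used only for illustration; a Key Vault reference is the
# safer choice in practice.
from azure.mgmt.datafactory.models import XeroLinkedService, SecureString

xero_ls = XeroLinkedService(
    host="api.xero.com",
    consumer_key=SecureString(value="<consumer-key>"),
    private_key=SecureString(value="<contents of the .pem file>"),
    use_encrypted_endpoints=True,
)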
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -35512,27 +36751,30 @@ class XeroSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -35545,6 +36787,7 @@ class XeroSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -35566,23 +36809,23 @@ class XmlDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -35593,9 +36836,9 @@ class XmlDataset(Dataset): of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: object + :type encoding_name: any :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :type null_value: any :param compression: The data compression method used for the json dataset. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ @@ -35640,25 +36883,25 @@ class XmlReadSettings(FormatReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings :param validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). - :type validation_mode: object + :type validation_mode: any :param detect_data_type: Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :type detect_data_type: object + :type detect_data_type: any :param namespaces: Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :type namespaces: object + :type namespaces: any :param namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). - :type namespace_prefixes: object + :type namespace_prefixes: any """ _validation = { @@ -35695,18 +36938,21 @@ class XmlSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
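# --- Editor's example (not part of the diff): combining the XmlReadSettings
# and XmlSource documented above. AzureBlobStorageReadSettings is one of the
# StoreReadSettings subclasses in this models module, assumed here for
# illustration.
from azure.mgmt.datafactory.models import (
    XmlSource, XmlReadSettings, AzureBlobStorageReadSettings)

source = XmlSource(
    store_settings=AzureBlobStorageReadSettings(recursive=True),
    format_settings=XmlReadSettings(
        validation_mode="xsd",      # 'none', 'xsd', or 'dtd'
        detect_data_type=True,
        namespaces=True,
        namespace_prefixes={"http://www.example.com/xml": "ex"}),
    disable_metrics_collection=False,
)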
Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Xml store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Xml format settings. @@ -35726,6 +36972,7 @@ class XmlSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -35749,12 +36996,12 @@ class ZipDeflateReadSettings(CompressionReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The Compression setting type.Constant filled by server. :type type: str :param preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_zip_file_name_as_folder: object + :type preserve_zip_file_name_as_folder: any """ _validation = { @@ -35783,7 +37030,7 @@ class ZohoLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -35793,28 +37040,28 @@ class ZohoLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). - :type endpoint: object + :type endpoint: any :param access_token: The access token for Zoho authentication. :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. 
The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -35859,28 +37106,28 @@ class ZohoObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -35917,27 +37164,30 @@ class ZohoSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -35950,6 +37200,7 @@ class ZohoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 2041a2bb0ac5..297e2db87335 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -7,7 +7,7 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Union import msrest.serialization @@ -49,13 +49,13 @@ class Activity(msrest.serialization.Model): """A pipeline activity. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + sub-classes are: ControlActivity, ExecutionActivity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -83,14 +83,14 @@ class Activity(msrest.serialization.Model): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + 'type': {'Container': 'ControlActivity', 'Execution': 'ExecutionActivity'} } def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -112,7 +112,7 @@ class ActivityDependency(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param activity: Required. Activity name. 
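# --- Editor's example (not part of the diff): the ActivityDependency type
# documented above expresses ordering between activities; the upstream
# activity name "CopyStep" is hypothetical.
from azure.mgmt.datafactory.models import ActivityDependency, WaitActivity

second = WaitActivity(
    name="RunAfterCopy",
    wait_time_in_seconds=5,
    depends_on=[ActivityDependency(
        activity="CopyStep",                   # upstream activity name
        dependency_conditions=["Succeeded"])], # run only on success
)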
:type activity: str :param dependency_conditions: Required. Match-Condition for the dependency. @@ -135,7 +135,7 @@ def __init__( *, activity: str, dependency_conditions: List[Union[str, "DependencyCondition"]], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(ActivityDependency, self).__init__(**kwargs) @@ -149,14 +149,14 @@ class ActivityPolicy(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param timeout: Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object + :type timeout: any :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :type retry: object + :type retry: any :param retry_interval_in_seconds: Interval between each retry attempt (in seconds). The default is 30 sec. :type retry_interval_in_seconds: int @@ -184,9 +184,9 @@ class ActivityPolicy(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - timeout: Optional[object] = None, - retry: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + timeout: Optional[Any] = None, + retry: Optional[Any] = None, retry_interval_in_seconds: Optional[int] = None, secure_input: Optional[bool] = None, secure_output: Optional[bool] = None, @@ -208,7 +208,7 @@ class ActivityRun(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar pipeline_name: The name of the pipeline. :vartype pipeline_name: str :ivar pipeline_run_id: The id of the pipeline run. @@ -230,11 +230,11 @@ class ActivityRun(msrest.serialization.Model): :ivar duration_in_ms: The duration of the activity run. :vartype duration_in_ms: int :ivar input: The input for the activity. - :vartype input: object + :vartype input: any :ivar output: The output for the activity. - :vartype output: object + :vartype output: any :ivar error: The error if any from the activity run. - :vartype error: object + :vartype error: any """ _validation = { @@ -273,7 +273,7 @@ class ActivityRun(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(ActivityRun, self).__init__(**kwargs) @@ -351,9 +351,9 @@ class AdditionalColumns(msrest.serialization.Model): """Specify the column name and value of additional columns. :param name: Additional column name. Type: string (or Expression with resultType string). - :type name: object + :type name: any :param value: Additional column value. Type: string (or Expression with resultType string). 
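# --- Editor's example (not part of the diff): an ActivityPolicy as documented
# above. Policies apply to execution activities (the "policy" parameter of
# ExecutionActivity subclasses), not to control activities.
from azure.mgmt.datafactory.models import ActivityPolicy

policy = ActivityPolicy(
    timeout="0.02:00:00",          # 2 hours instead of the 7-day default
    retry=3,                       # up to 3 ordinary retries (default 0)
    retry_interval_in_seconds=60,  # default is 30
    secure_output=True,            # keep output out of monitoring logs
)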
- :type value: object + :type value: any """ _attribute_map = { @@ -364,8 +364,8 @@ class AdditionalColumns(msrest.serialization.Model): def __init__( self, *, - name: Optional[object] = None, - value: Optional[object] = None, + name: Optional[Any] = None, + value: Optional[Any] = None, **kwargs ): super(AdditionalColumns, self).__init__(**kwargs) @@ -383,7 +383,7 @@ class LinkedService(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -393,7 +393,7 @@ class LinkedService(msrest.serialization.Model): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] """ _validation = { @@ -416,11 +416,11 @@ class LinkedService(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(LinkedService, self).__init__(**kwargs) @@ -439,7 +439,7 @@ class AmazonMWSLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -449,36 +449,36 @@ class AmazonMWSLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com). - :type endpoint: object + :type endpoint: any :param marketplace_id: Required. The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2). - :type marketplace_id: object + :type marketplace_id: any :param seller_id: Required. The Amazon seller ID. - :type seller_id: object + :type seller_id: any :param mws_auth_token: The Amazon MWS authentication token. :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase :param access_key_id: Required. The access key id used to access data. - :type access_key_id: object + :type access_key_id: any :param secret_key: The secret key used to access data. :type secret_key: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
- :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -511,21 +511,21 @@ class AmazonMWSLinkedService(LinkedService): def __init__( self, *, - endpoint: object, - marketplace_id: object, - seller_id: object, - access_key_id: object, - additional_properties: Optional[Dict[str, object]] = None, + endpoint: Any, + marketplace_id: Any, + seller_id: Any, + access_key_id: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, mws_auth_token: Optional["SecretBase"] = None, secret_key: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -552,23 +552,23 @@ class Dataset(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -599,12 +599,12 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -627,28 +627,28 @@ class AmazonMWSObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -673,14 +673,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -698,18 +698,21 @@ class CopySource(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any """ _validation = { @@ -722,6 +725,7 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -731,10 +735,11 @@ class CopySource(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, **kwargs ): super(CopySource, self).__init__(**kwargs) @@ -743,6 +748,7 @@ def __init__( self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class TabularSource(CopySource): @@ -755,21 +761,24 @@ class TabularSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. 
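# --- Editor's example (not part of the diff): using AdditionalColumns with a
# TabularSource subclass, as documented above; disable_metrics_collection is
# forwarded to the CopySource base via super(). Values are hypothetical.
from azure.mgmt.datafactory.models import AmazonMWSSource, AdditionalColumns

source = AmazonMWSSource(
    query="SELECT * FROM Orders",
    additional_columns=[
        AdditionalColumns(name="ingested_from", value="mws"),
    ],
    disable_metrics_collection=True,
)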
Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -785,6 +794,7 @@ class TabularSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -796,15 +806,16 @@ class TabularSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'TabularSource' # type: str self.query_timeout = query_timeout self.additional_columns = additional_columns @@ -817,27 +828,30 @@ class AmazonMWSSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -850,6 +864,7 @@ class AmazonMWSSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -858,16 +873,17 @@ class AmazonMWSSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonMWSSource' # type: str self.query = query @@ -879,7 +895,7 @@ class AmazonRedshiftLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -889,25 +905,25 @@ class AmazonRedshiftLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Required. The name of the Amazon Redshift server. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param username: The username of the Amazon Redshift source. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: The password of the Amazon Redshift source. :type password: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. The database name of the Amazon Redshift source. 
Type: string (or Expression with resultType string). - :type database: object + :type database: any :param port: The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). - :type port: object + :type port: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -934,17 +950,17 @@ class AmazonRedshiftLinkedService(LinkedService): def __init__( self, *, - server: object, - database: object, - additional_properties: Optional[Dict[str, object]] = None, + server: Any, + database: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - port: Optional[object] = None, - encrypted_credential: Optional[object] = None, + port: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -964,26 +980,29 @@ class AmazonRedshiftSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. 
With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. @@ -1000,6 +1019,7 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -1009,17 +1029,18 @@ class AmazonRedshiftSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None, **kwargs ): - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonRedshiftSource' # type: str self.query = query self.redshift_unload_settings = redshift_unload_settings @@ -1032,35 +1053,35 @@ class AmazonRedshiftTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
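The Redshift source keeps its unload-through-S3 path; this hunk only retypes the properties and adds the metrics flag. A sketch of the unload configuration, assuming the v1.x RedshiftUnloadSettings and LinkedServiceReference constructors (all names are placeholders):

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftSource,
        LinkedServiceReference,
        RedshiftUnloadSettings,
    )

    source = AmazonRedshiftSource(
        query="select * from public.events",  # placeholder query
        redshift_unload_settings=RedshiftUnloadSettings(
            s3_linked_service_name=LinkedServiceReference(reference_name="InterimS3"),
            bucket_name="unload-staging",     # interim S3 bucket, placeholder
        ),
        disable_metrics_collection=True,
    )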
- :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The Amazon Redshift table name. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The Amazon Redshift schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -1087,16 +1108,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -1113,7 +1134,7 @@ class AmazonS3CompatibleLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -1123,10 +1144,10 @@ class AmazonS3CompatibleLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param access_key_id: The access key identifier of the Amazon S3 Compatible Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object + :type access_key_id: any :param secret_access_key: The secret access key of the Amazon S3 Compatible Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -1134,14 +1155,14 @@ class AmazonS3CompatibleLinkedService(LinkedService): Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: object + :type service_url: any :param force_path_style: If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. 
Type: boolean (or Expression with resultType boolean). - :type force_path_style: object + :type force_path_style: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -1165,16 +1186,16 @@ class AmazonS3CompatibleLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - access_key_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + access_key_id: Optional[Any] = None, secret_access_key: Optional["SecretBase"] = None, - service_url: Optional[object] = None, - force_path_style: Optional[object] = None, - encrypted_credential: Optional[object] = None, + service_url: Optional[Any] = None, + force_path_style: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AmazonS3CompatibleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -1196,15 +1217,15 @@ class DatasetLocation(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -1225,9 +1246,9 @@ class DatasetLocation(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, **kwargs ): super(DatasetLocation, self).__init__(**kwargs) @@ -1244,21 +1265,21 @@ class AmazonS3CompatibleLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: string (or Expression with resultType string). 
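For the Amazon S3 Compatible linked service this regeneration only retypes object to any, so construction is unchanged. A sketch with placeholder values; SecureString is the in-line SecretBase implementation from these models:

    from azure.mgmt.datafactory.models import (
        AmazonS3CompatibleLinkedService,
        SecureString,
    )

    linked_service = AmazonS3CompatibleLinkedService(
        access_key_id="AKIAEXAMPLE",                       # placeholder IAM key id
        secret_access_key=SecureString(value="<secret>"),  # placeholder secret
        service_url="https://s3.example.internal",         # placeholder endpoint
        force_path_style=True,  # path-style access for non-AWS S3 endpoints
    )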
- :type bucket_name: object + :type bucket_name: any :param version: Specify the version of Amazon S3 Compatible. Type: string (or Expression with resultType string). - :type version: object + :type version: any """ _validation = { @@ -1277,11 +1298,11 @@ class AmazonS3CompatibleLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - bucket_name: Optional[object] = None, - version: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, + bucket_name: Optional[Any] = None, + version: Optional[Any] = None, **kwargs ): super(AmazonS3CompatibleLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -1300,12 +1321,15 @@ class StoreReadSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any """ _validation = { @@ -1316,6 +1340,7 @@ class StoreReadSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -1325,14 +1350,16 @@ class StoreReadSettings(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, **kwargs ): super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class AmazonS3CompatibleReadSettings(StoreReadSettings): @@ -1342,42 +1369,45 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the S3 Compatible object name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
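StoreReadSettings gains the same disableMetricsCollection flag, so every concrete read-settings class inherits it. For example, with illustrative filter values:

    from azure.mgmt.datafactory.models import AmazonS3CompatibleReadSettings

    read_settings = AmazonS3CompatibleReadSettings(
        recursive=True,
        wildcard_file_name="*.avro",          # illustrative filename filter
        delete_files_after_completion=False,
        disable_metrics_collection=True,      # new in this patch
    )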
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -1388,6 +1418,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1403,21 +1434,22 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - prefix: Optional[object] = None, - file_list_path: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + prefix: Optional[Any] = None, + file_list_path: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, **kwargs ): - super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3CompatibleReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -1438,44 +1470,44 @@ class AmazonS3Dataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType string). - :type key: object + :type key: any :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param version: The version for the S3 object. Type: string (or Expression with resultType string). - :type version: object + :type version: any :param modified_datetime_start: The start of S3 object's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of S3 object's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any :param format: The format of files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 object. @@ -1512,19 +1544,19 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - bucket_name: object, - additional_properties: Optional[Dict[str, object]] = None, + bucket_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - key: Optional[object] = None, - prefix: Optional[object] = None, - version: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + key: Optional[Any] = None, + prefix: Optional[Any] = None, + version: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, format: Optional["DatasetStorageFormat"] = None, compression: Optional["DatasetCompression"] = None, **kwargs @@ -1548,7 +1580,7 @@ class AmazonS3LinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -1558,26 +1590,26 @@ class AmazonS3LinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param authentication_type: The authentication type of S3. 
Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). - :type authentication_type: object + :type authentication_type: any :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object + :type access_key_id: any :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: object + :type service_url: any :param session_token: The session token for the S3 temporary security credential. :type session_token: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -1602,17 +1634,17 @@ class AmazonS3LinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - access_key_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + authentication_type: Optional[Any] = None, + access_key_id: Optional[Any] = None, secret_access_key: Optional["SecretBase"] = None, - service_url: Optional[object] = None, + service_url: Optional[Any] = None, session_token: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -1632,21 +1664,21 @@ class AmazonS3Location(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any :param version: Specify the version of amazon S3. Type: string (or Expression with resultType string). 
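A sketch of the temporary-security-credentials path that the authentication_type docstring describes, with placeholder tokens:

    from azure.mgmt.datafactory.models import AmazonS3LinkedService, SecureString

    s3 = AmazonS3LinkedService(
        authentication_type="TemporarySecurityCredentials",
        access_key_id="ASIAEXAMPLE",                          # placeholder
        secret_access_key=SecureString(value="<secret>"),     # placeholder
        session_token=SecureString(value="<session-token>"),  # placeholder
    )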
- :type version: object + :type version: any """ _validation = { @@ -1665,11 +1697,11 @@ class AmazonS3Location(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - bucket_name: Optional[object] = None, - version: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, + bucket_name: Optional[Any] = None, + version: Optional[Any] = None, **kwargs ): super(AmazonS3Location, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -1685,42 +1717,45 @@ class AmazonS3ReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -1731,6 +1766,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1746,21 +1782,22 @@ class AmazonS3ReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - prefix: Optional[object] = None, - file_list_path: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + prefix: Optional[Any] = None, + file_list_path: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, **kwargs ): - super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3ReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -1774,14 +1811,69 @@ def __init__( self.modified_datetime_end = modified_datetime_end -class AppendVariableActivity(Activity): +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach, Until. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AppendVariableActivity, ExecutePipelineActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. 
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + } + + _subtype_map = { + 'type': {'AppendVariable': 'AppendVariableActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + } + + def __init__( + self, + *, + name: str, + additional_properties: Optional[Dict[str, Any]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + **kwargs + ): + super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Container' # type: str + + +class AppendVariableActivity(ControlActivity): """Append value for a Variable of type Array. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -1795,7 +1887,7 @@ class AppendVariableActivity(Activity): :param variable_name: Name of the variable whose value needs to be appended to. :type variable_name: str :param value: Value to be appended. Could be a static value or Expression. - :type value: object + :type value: any """ _validation = { @@ -1818,12 +1910,12 @@ def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, variable_name: Optional[str] = None, - value: Optional[object] = None, + value: Optional[Any] = None, **kwargs ): super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) @@ -1832,6 +1924,31 @@ def __init__( self.value = value +class ArmIdWrapper(msrest.serialization.Model): + """A wrapper for an ARM resource id. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ArmIdWrapper, self).__init__(**kwargs) + self.id = None + + class AvroDataset(Dataset): """Avro dataset. 
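This hunk reparents the eleven control-flow activity types onto the new ControlActivity base without changing their wire shape; the shared 'Container' discriminator exists only on the intermediate class, and each subclass still reports its concrete type. AppendVariableActivity, for instance, constructs exactly as before (values are illustrative):

    from azure.mgmt.datafactory.models import AppendVariableActivity

    append = AppendVariableActivity(
        name="AppendRunId",
        variable_name="run_ids",    # a pipeline variable of type Array
        value="@pipeline().RunId",  # static value or expression
    )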
@@ -1839,31 +1956,31 @@ class AvroDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the avro storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + resultType string). + :type avro_compression_codec: any :param avro_compression_level: :type avro_compression_level: int """ @@ -1885,7 +2002,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1893,15 +2010,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None, + avro_compression_codec: Optional[Any] = None, avro_compression_level: Optional[int] = None, **kwargs ): @@ -1922,13 +2039,13 @@ class DatasetStorageFormat(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. 
Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any """ _validation = { @@ -1949,9 +2066,9 @@ class DatasetStorageFormat(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - serializer: Optional[object] = None, - deserializer: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + serializer: Optional[Any] = None, + deserializer: Optional[Any] = None, **kwargs ): super(DatasetStorageFormat, self).__init__(**kwargs) @@ -1968,13 +2085,13 @@ class AvroFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any """ _validation = { @@ -1991,9 +2108,9 @@ class AvroFormat(DatasetStorageFormat): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - serializer: Optional[object] = None, - deserializer: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + serializer: Optional[Any] = None, + deserializer: Optional[Any] = None, **kwargs ): super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) @@ -2004,30 +2121,33 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. 
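avro_compression_codec loosens from the AvroCompressionCodec enum to an expression-capable value, so the enum's string values still pass while ADF expressions become representable; what is lost is client-side validation of the codec name. A sketch, assuming the v1.x LinkedServiceReference constructor (names are placeholders):

    from azure.mgmt.datafactory.models import (
        AmazonS3Location,
        AvroDataset,
        LinkedServiceReference,
    )

    dataset = AvroDataset(
        linked_service_name=LinkedServiceReference(reference_name="S3LinkedService"),
        location=AmazonS3Location(bucket_name="data-lake", folder_path="events/avro"),
        avro_compression_codec="snappy",  # plain string, as the old enum serialized
    )

    # Expression form, representable now that the property accepts any:
    dataset.avro_compression_codec = {"type": "Expression", "value": "@dataset().codec"}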
:param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any """ _validation = { @@ -2042,21 +2162,23 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 
'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, **kwargs ): super(CopySink, self).__init__(**kwargs) @@ -2067,6 +2189,7 @@ def __init__( self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class AvroSink(CopySink): @@ -2076,24 +2199,27 @@ class AvroSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Avro format settings. @@ -2112,6 +2238,7 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } @@ -2119,17 +2246,18 @@ class AvroSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["AvroWriteSettings"] = None, **kwargs ): - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -2142,18 +2270,21 @@ class AvroSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
- :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -2171,6 +2302,7 @@ class AvroSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -2178,15 +2310,16 @@ class AvroSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -2202,7 +2335,7 @@ class FormatWriteSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str """ @@ -2223,7 +2356,7 @@ class FormatWriteSettings(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(FormatWriteSettings, self).__init__(**kwargs) @@ -2238,7 +2371,7 @@ class AvroWriteSettings(FormatWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param record_name: Top level record name in write result, which is required in AVRO spec. 
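
A minimal usage sketch for the regenerated Avro sink models above (not part of the generated diff; the names and literal values are illustrative, and every `Any`-typed field also accepts an ADF expression object instead of a literal):

from azure.mgmt.datafactory.models import (
    AvroSink,
    AvroWriteSettings,
    AzureBlobFSWriteSettings,
)

# Store settings for the sink; disable_metrics_collection is the new
# opt-out added to the sink/settings models in this API version.
store_settings = AzureBlobFSWriteSettings(
    copy_behavior="PreserveHierarchy",
    block_size_in_mb=8,
)

avro_sink = AvroSink(
    store_settings=store_settings,
    format_settings=AvroWriteSettings(
        record_name="Root",                 # required by the AVRO spec
        record_namespace="com.contoso",     # hypothetical namespace
    ),
    max_concurrent_connections=4,
    disable_metrics_collection=True,        # default is False (metrics on)
)
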
@@ -2247,11 +2380,11 @@ class AvroWriteSettings(FormatWriteSettings): :type record_namespace: str :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object + :type max_rows_per_file: any :param file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: object + :type file_name_prefix: any """ _validation = { @@ -2270,11 +2403,11 @@ class AvroWriteSettings(FormatWriteSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, record_name: Optional[str] = None, record_namespace: Optional[str] = None, - max_rows_per_file: Optional[object] = None, - file_name_prefix: Optional[object] = None, + max_rows_per_file: Optional[Any] = None, + file_name_prefix: Optional[Any] = None, **kwargs ): super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -2356,7 +2489,7 @@ class AzureBatchLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -2366,24 +2499,24 @@ class AzureBatchLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param account_name: Required. The Azure Batch account name. Type: string (or Expression with resultType string). - :type account_name: object + :type account_name: any :param access_key: The Azure Batch account access key. :type access_key: ~azure.mgmt.datafactory.models.SecretBase :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType string). - :type batch_uri: object + :type batch_uri: any :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with resultType string). - :type pool_name: object + :type pool_name: any :param linked_service_name: Required. The Azure Storage linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -2412,17 +2545,17 @@ class AzureBatchLinkedService(LinkedService): def __init__( self, *, - account_name: object, - batch_uri: object, - pool_name: object, + account_name: Any, + batch_uri: Any, + pool_name: Any, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, access_key: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -2442,41 +2575,41 @@ class AzureBlobDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param table_root_location: The root of blob path. Type: string (or Expression with resultType string). - :type table_root_location: object + :type table_root_location: any :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any :param format: The format of the Azure Blob storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. 
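
For reference, a hedged construction sketch for the AzureBatchLinkedService regenerated above (account, URI, and pool names are hypothetical; SecureString stands in for any SecretBase implementation):

from azure.mgmt.datafactory.models import (
    AzureBatchLinkedService,
    LinkedServiceReference,
    SecureString,
)

batch_ls = AzureBatchLinkedService(
    account_name="mybatchaccount",          # hypothetical Batch account
    batch_uri="https://mybatchaccount.westus2.batch.azure.com",
    pool_name="adf-pool",
    access_key=SecureString(value="<batch-access-key>"),
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="AzureStorageLinkedService",  # assumed existing linked service
    ),
)

Because these fields are now annotated `Any`, an expression object such as {"value": "@pipeline().parameters.poolName", "type": "Expression"} can be passed wherever a literal string is shown.
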
@@ -2511,18 +2644,18 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - folder_path: Optional[object] = None, - table_root_location: Optional[object] = None, - file_name: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + folder_path: Optional[Any] = None, + table_root_location: Optional[Any] = None, + file_name: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, format: Optional["DatasetStorageFormat"] = None, compression: Optional["DatasetCompression"] = None, **kwargs @@ -2545,32 +2678,32 @@ class AzureBlobFSDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param format: The format of the Azure Data Lake Storage Gen2 storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. 
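
A construction sketch for the blob dataset models in these hunks (assumed names; deploying it would mean wrapping the model in a DatasetResource and calling the client's datasets.create_or_update):

from azure.mgmt.datafactory.models import (
    AzureBlobDataset,
    DatasetFolder,
    DatasetResource,
    LinkedServiceReference,
)

blob_ds = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="AzureBlobStorageLinkedService",  # assumed existing linked service
    ),
    folder=DatasetFolder(name="raw"),
    # Any-typed fields take literals or ADF expression objects:
    folder_path={"value": "@dataset().basePath", "type": "Expression"},
    file_name="input.csv",
)

dataset_resource = DatasetResource(properties=blob_ds)
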
@@ -2602,15 +2735,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, format: Optional["DatasetStorageFormat"] = None, compression: Optional["DatasetCompression"] = None, **kwargs @@ -2630,7 +2763,7 @@ class AzureBlobFSLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -2640,30 +2773,30 @@ class AzureBlobFSLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). - :type account_key: object + :type account_key: any :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -2690,18 +2823,18 @@ class AzureBlobFSLinkedService(LinkedService): def __init__( self, *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, + url: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - account_key: Optional[object] = None, - service_principal_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + account_key: Optional[Any] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - azure_cloud_type: Optional[object] = None, - encrypted_credential: Optional[object] = None, + tenant: Optional[Any] = None, + azure_cloud_type: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -2722,18 +2855,18 @@ class AzureBlobFSLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). - :type file_system: object + :type file_system: any """ _validation = { @@ -2751,10 +2884,10 @@ class AzureBlobFSLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - file_system: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, + file_system: Optional[Any] = None, **kwargs ): super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -2769,39 +2902,42 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -2812,6 +2948,7 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2826,20 +2963,21 @@ class AzureBlobFSReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - file_list_path: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + file_list_path: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, **kwargs ): - super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -2859,26 +2997,32 @@ class AzureBlobFSSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. 
Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -2893,24 +3037,29 @@ class AzureBlobFSSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, + metadata: Optional[List["MetadataItem"]] = None, **kwargs ): - super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = copy_behavior + self.metadata = metadata class AzureBlobFSSource(CopySource): @@ -2920,27 +3069,30 @@ class AzureBlobFSSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: object + :type treat_empty_as_null: any :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object + :type skip_header_line_count: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any """ _validation = { @@ -2953,6 +3105,7 @@ class AzureBlobFSSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -2961,16 +3114,17 @@ class AzureBlobFSSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - treat_empty_as_null: Optional[object] = None, - skip_header_line_count: Optional[object] = None, - recursive: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + treat_empty_as_null: Optional[Any] = None, + skip_header_line_count: Optional[Any] = None, + recursive: Optional[Any] = None, **kwargs ): - super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count @@ -2987,14 +3141,17 @@ class StoreWriteSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. 
:type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any """ _validation = { @@ -3005,6 +3162,7 @@ class StoreWriteSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -3015,15 +3173,17 @@ class StoreWriteSettings(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, **kwargs ): super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection self.copy_behavior = copy_behavior @@ -3034,17 +3194,20 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
- :type block_size_in_mb: object + :type block_size_in_mb: any """ _validation = { @@ -3055,6 +3218,7 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3062,13 +3226,14 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, - block_size_in_mb: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, + block_size_in_mb: Optional[Any] = None, **kwargs ): - super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobFSWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -3080,7 +3245,7 @@ class AzureBlobStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -3090,16 +3255,16 @@ class AzureBlobStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param account_key: The Azure key vault secret reference of accountKey in connection string. :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object + :type sas_uri: any :param sas_token: The Azure key vault secret reference of sasToken in sas uri. :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is @@ -3107,17 +3272,17 @@ class AzureBlobStorageLinkedService(LinkedService): :type service_endpoint: str :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. 
Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param account_kind: Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). @@ -3155,20 +3320,20 @@ class AzureBlobStorageLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, + sas_uri: Optional[Any] = None, sas_token: Optional["AzureKeyVaultSecretReference"] = None, service_endpoint: Optional[str] = None, - service_principal_id: Optional[object] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - azure_cloud_type: Optional[object] = None, + tenant: Optional[Any] = None, + azure_cloud_type: Optional[Any] = None, account_kind: Optional[str] = None, encrypted_credential: Optional[str] = None, **kwargs @@ -3195,18 +3360,18 @@ class AzureBlobStorageLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param container: Specify the container of azure blob. Type: string (or Expression with resultType string). 
- :type container: object + :type container: any """ _validation = { @@ -3224,10 +3389,10 @@ class AzureBlobStorageLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - container: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, + container: Optional[Any] = None, **kwargs ): super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -3242,42 +3407,45 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -3288,6 +3456,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -3303,21 +3472,22 @@ class AzureBlobStorageReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - prefix: Optional[object] = None, - file_list_path: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + prefix: Optional[Any] = None, + file_list_path: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, **kwargs ): - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -3338,17 +3508,20 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
- :type block_size_in_mb: object + :type block_size_in_mb: any """ _validation = { @@ -3359,6 +3532,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3366,13 +3540,14 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, - block_size_in_mb: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, + block_size_in_mb: Optional[Any] = None, **kwargs ): - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobStorageWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -3384,31 +3559,31 @@ class AzureDatabricksDeltaLakeDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table: The name of delta table. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param database: The database name of delta table. Type: string (or Expression with resultType string). 
- :type database: object + :type database: any """ _validation = { @@ -3434,15 +3609,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table: Optional[object] = None, - database: Optional[object] = None, + table: Optional[Any] = None, + database: Optional[Any] = None, **kwargs ): super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -3461,7 +3636,7 @@ class ExportSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The export setting type.Constant filled by server. :type type: str """ @@ -3482,7 +3657,7 @@ class ExportSettings(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(ExportSettings, self).__init__(**kwargs) @@ -3497,15 +3672,15 @@ class AzureDatabricksDeltaLakeExportCommand(ExportSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The export setting type.Constant filled by server. :type type: str :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type date_format: object + :type date_format: any :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type timestamp_format: object + :type timestamp_format: any """ _validation = { @@ -3522,9 +3697,9 @@ class AzureDatabricksDeltaLakeExportCommand(ExportSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - date_format: Optional[object] = None, - timestamp_format: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + date_format: Optional[Any] = None, + timestamp_format: Optional[Any] = None, **kwargs ): super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) @@ -3543,7 +3718,7 @@ class ImportSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The import setting type.Constant filled by server. 
:type type: str """ @@ -3564,7 +3739,7 @@ class ImportSettings(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(ImportSettings, self).__init__(**kwargs) @@ -3579,15 +3754,15 @@ class AzureDatabricksDeltaLakeImportCommand(ImportSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The import setting type.Constant filled by server. :type type: str :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type date_format: object + :type date_format: any :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). - :type timestamp_format: object + :type timestamp_format: any """ _validation = { @@ -3604,9 +3779,9 @@ class AzureDatabricksDeltaLakeImportCommand(ImportSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - date_format: Optional[object] = None, - timestamp_format: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + date_format: Optional[Any] = None, + timestamp_format: Optional[Any] = None, **kwargs ): super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) @@ -3622,7 +3797,7 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -3632,21 +3807,21 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). - :type domain: object + :type domain: any :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param cluster_id: The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). - :type cluster_id: object + :type cluster_id: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -3670,15 +3845,15 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): def __init__( self, *, - domain: object, - additional_properties: Optional[Dict[str, object]] = None, + domain: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, access_token: Optional["SecretBase"] = None, - cluster_id: Optional[object] = None, - encrypted_credential: Optional[object] = None, + cluster_id: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -3696,27 +3871,30 @@ class AzureDatabricksDeltaLakeSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param import_settings: Azure Databricks Delta Lake import settings. 
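# --- Editorial aside (not part of the generated patch): a minimal usage sketch for
# the AzureDatabricksDeltaLakeLinkedService constructor completed above. The domain,
# token, and cluster id are hypothetical placeholders; with this change the
# expression-capable parameters are typed Any rather than object.
from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeLinkedService,
    SecureString,
)

delta_lake_ls = AzureDatabricksDeltaLakeLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",  # hypothetical workspace URL
    access_token=SecureString(value="<databricks-pat>"),          # placeholder secret
    cluster_id="0000-000000-abcdefgh",                            # hypothetical interactive cluster
)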
:type import_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand """ @@ -3733,6 +3911,7 @@ class AzureDatabricksDeltaLakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, } @@ -3740,17 +3919,18 @@ class AzureDatabricksDeltaLakeSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, **kwargs ): - super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSink' # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -3763,21 +3943,24 @@ class AzureDatabricksDeltaLakeSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: any :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param export_settings: Azure Databricks Delta Lake export settings. :type export_settings: ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand """ @@ -3792,6 +3975,7 @@ class AzureDatabricksDeltaLakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, } @@ -3799,15 +3983,16 @@ class AzureDatabricksDeltaLakeSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, **kwargs ): - super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSource' # type: str self.query = query self.export_settings = export_settings @@ -3820,7 +4005,7 @@ class AzureDatabricksLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -3830,72 +4015,72 @@ class AzureDatabricksLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). - :type domain: object + :type domain: any :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param authentication: Required to specify MSI, if using Workspace resource id for databricks REST API. 
Type: string (or Expression with resultType string).
-    :type authentication: object
+    :type authentication: any
     :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or
      Expression with resultType string).
-    :type workspace_resource_id: object
+    :type workspace_resource_id: any
     :param existing_cluster_id: The id of an existing interactive cluster that will be used for all
      runs of this activity. Type: string (or Expression with resultType string).
-    :type existing_cluster_id: object
+    :type existing_cluster_id: any
     :param instance_pool_id: The id of an existing instance pool that will be used for all runs of
      this activity. Type: string (or Expression with resultType string).
-    :type instance_pool_id: object
+    :type instance_pool_id: any
     :param new_cluster_version: If not using an existing interactive cluster, this specifies the
      Spark version of a new job cluster or instance pool nodes created for each run of this
      activity. Required if instancePoolId is specified. Type: string (or Expression with resultType
      string).
-    :type new_cluster_version: object
+    :type new_cluster_version: any
     :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies
      the number of worker nodes to use for the new job cluster or instance pool. For new job
-    clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-
-    scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can
-    only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is
+    clusters, this is a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means
+    auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and
+    can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is
     specified. Type: string (or Expression with resultType string).
-    :type new_cluster_num_of_worker: object
+    :type new_cluster_num_of_worker: any
     :param new_cluster_node_type: The node type of the new job cluster. This property is required
      if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is
      specified, this property is ignored. Type: string (or Expression with resultType string).
-    :type new_cluster_node_type: object
+    :type new_cluster_node_type: any
     :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value
      pairs.
-    :type new_cluster_spark_conf: dict[str, object]
+    :type new_cluster_spark_conf: dict[str, any]
     :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment
      variables key-value pairs.
-    :type new_cluster_spark_env_vars: dict[str, object]
+    :type new_cluster_spark_env_vars: dict[str, any]
     :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored
      in instance pool configurations.
-    :type new_cluster_custom_tags: dict[str, object]
+    :type new_cluster_custom_tags: dict[str, any]
     :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and
      event logs. Type: string (or Expression with resultType string).
-    :type new_cluster_log_destination: object
+    :type new_cluster_log_destination: any
     :param new_cluster_driver_node_type: The driver node type for the new job cluster. This
      property is ignored in instance pool configurations. Type: string (or Expression with
      resultType string).
- :type new_cluster_driver_node_type: object + :type new_cluster_driver_node_type: any :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object + :type new_cluster_init_scripts: any :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object + :type new_cluster_enable_elastic_disk: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). - :type policy_id: object + :type policy_id: any """ _validation = { @@ -3933,29 +4118,29 @@ class AzureDatabricksLinkedService(LinkedService): def __init__( self, *, - domain: object, - additional_properties: Optional[Dict[str, object]] = None, + domain: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, access_token: Optional["SecretBase"] = None, - authentication: Optional[object] = None, - workspace_resource_id: Optional[object] = None, - existing_cluster_id: Optional[object] = None, - instance_pool_id: Optional[object] = None, - new_cluster_version: Optional[object] = None, - new_cluster_num_of_worker: Optional[object] = None, - new_cluster_node_type: Optional[object] = None, - new_cluster_spark_conf: Optional[Dict[str, object]] = None, - new_cluster_spark_env_vars: Optional[Dict[str, object]] = None, - new_cluster_custom_tags: Optional[Dict[str, object]] = None, - new_cluster_log_destination: Optional[object] = None, - new_cluster_driver_node_type: Optional[object] = None, - new_cluster_init_scripts: Optional[object] = None, - new_cluster_enable_elastic_disk: Optional[object] = None, - encrypted_credential: Optional[object] = None, - policy_id: Optional[object] = None, + authentication: Optional[Any] = None, + workspace_resource_id: Optional[Any] = None, + existing_cluster_id: Optional[Any] = None, + instance_pool_id: Optional[Any] = None, + new_cluster_version: Optional[Any] = None, + new_cluster_num_of_worker: Optional[Any] = None, + new_cluster_node_type: Optional[Any] = None, + new_cluster_spark_conf: Optional[Dict[str, Any]] = None, + new_cluster_spark_env_vars: Optional[Dict[str, Any]] = None, + new_cluster_custom_tags: Optional[Dict[str, Any]] = None, + new_cluster_log_destination: Optional[Any] = None, + new_cluster_driver_node_type: Optional[Any] = None, + new_cluster_init_scripts: Optional[Any] = None, + new_cluster_enable_elastic_disk: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, + policy_id: Optional[Any] = None, **kwargs ): super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, 
parameters=parameters, annotations=annotations, **kwargs) @@ -3990,7 +4175,7 @@ class ExecutionActivity(Activity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -4031,7 +4216,7 @@ def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -4052,7 +4237,7 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -4069,10 +4254,10 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param command: Required. A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). - :type command: object + :type command: any :param command_timeout: Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :type command_timeout: any """ _validation = { @@ -4098,14 +4283,14 @@ def __init__( self, *, name: str, - command: object, - additional_properties: Optional[Dict[str, object]] = None, + command: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - command_timeout: Optional[object] = None, + command_timeout: Optional[Any] = None, **kwargs ): super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -4121,7 +4306,7 @@ class AzureDataExplorerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -4131,23 +4316,23 @@ class AzureDataExplorerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format https://:code:``.:code:``.kusto.windows.net. 
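# --- Editorial aside (not part of the generated patch): a sketch of the
# AzureDatabricksLinkedService constructor from the hunk above, showing the
# string-formatted newClusterNumOfWorker values its docstring describes
# ('1' = fixed size, '1:10' = autoscale). All values are illustrative.
from azure.mgmt.datafactory.models import AzureDatabricksLinkedService, SecureString

databricks_ls = AzureDatabricksLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",  # hypothetical workspace URL
    access_token=SecureString(value="<databricks-pat>"),          # placeholder secret
    new_cluster_version="9.1.x-scala2.12",    # hypothetical Spark runtime version
    new_cluster_num_of_worker="1:10",         # autoscale from 1 (min) to 10 (max) workers
    new_cluster_node_type="Standard_DS3_v2",  # hypothetical VM SKU
)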
Type: string (or Expression with resultType string). - :type endpoint: object + :type endpoint: any :param service_principal_id: The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Kusto. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any """ _validation = { @@ -4173,16 +4358,16 @@ class AzureDataExplorerLinkedService(LinkedService): def __init__( self, *, - endpoint: object, - database: object, - additional_properties: Optional[Dict[str, object]] = None, + endpoint: Any, + database: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - service_principal_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, + tenant: Optional[Any] = None, **kwargs ): super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -4201,33 +4386,36 @@ class AzureDataExplorerSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. 
- :type ingestion_mapping_name: object + :type ingestion_mapping_name: any :param ingestion_mapping_as_json: An explicit column mapping description provided in a json format. Type: string. - :type ingestion_mapping_as_json: object + :type ingestion_mapping_as_json: any :param flush_immediately: If set to true, any aggregation will be skipped. Default is false. Type: boolean. - :type flush_immediately: object + :type flush_immediately: any """ _validation = { @@ -4242,6 +4430,7 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, @@ -4250,18 +4439,19 @@ class AzureDataExplorerSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - ingestion_mapping_name: Optional[object] = None, - ingestion_mapping_as_json: Optional[object] = None, - flush_immediately: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + ingestion_mapping_name: Optional[Any] = None, + ingestion_mapping_as_json: Optional[Any] = None, + flush_immediately: Optional[Any] = None, **kwargs ): - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSink' # type: str self.ingestion_mapping_name = ingestion_mapping_name self.ingestion_mapping_as_json = ingestion_mapping_as_json @@ -4275,27 +4465,30 @@ class AzureDataExplorerSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. 
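# --- Editorial aside (not part of the generated patch): a sketch of the
# AzureDataExplorerSink constructor completed above, including the
# disableMetricsCollection flag this patch introduces. Names are placeholders.
from azure.mgmt.datafactory.models import AzureDataExplorerSink

adx_sink = AzureDataExplorerSink(
    ingestion_mapping_name="csv_mapping_v1",  # hypothetical pre-created csv mapping
    flush_immediately=True,                   # skip aggregation, per the docstring
    disable_metrics_collection=False,         # new Any-typed flag from this patch
)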
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param no_truncation: The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. - :type no_truncation: object + :type no_truncation: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -4312,6 +4505,7 @@ class AzureDataExplorerSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, @@ -4321,17 +4515,18 @@ class AzureDataExplorerSource(CopySource): def __init__( self, *, - query: object, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - no_truncation: Optional[object] = None, - query_timeout: Optional[object] = None, + query: Any, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + no_truncation: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSource' # type: str self.query = query self.no_truncation = no_truncation @@ -4346,29 +4541,29 @@ class AzureDataExplorerTableDataset(Dataset): :param additional_properties: Unmatched 
properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table: The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -4393,14 +4588,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table: Optional[object] = None, + table: Optional[Any] = None, **kwargs ): super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -4415,7 +4610,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -4425,32 +4620,32 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). - :type account_name: object + :type account_name: any :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). 
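# --- Editorial aside (not part of the generated patch): a sketch of the
# AzureDataExplorerSource constructor defined above; query is the only required
# argument, and the Any-typed fields accept literals or ADF expressions.
from azure.mgmt.datafactory.models import AzureDataExplorerSource

adx_source = AzureDataExplorerSource(
    query="StormEvents | take 100",   # KQL query, per the docstring (table name hypothetical)
    query_timeout="00:10:00",         # matches the documented timespan pattern
    disable_metrics_collection=True,  # new flag introduced by this patch
)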
-    :type service_principal_id: object
+    :type service_principal_id: any
     :param service_principal_key: The Key of the application used to authenticate against the
      Azure Data Lake Analytics account.
     :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
     :param tenant: Required. The name or ID of the tenant to which the service principal belongs.
      Type: string (or Expression with resultType string).
-    :type tenant: object
+    :type tenant: any
     :param subscription_id: Data Lake Analytics account subscription ID (if different from Data
      Factory account). Type: string (or Expression with resultType string).
-    :type subscription_id: object
+    :type subscription_id: any
     :param resource_group_name: Data Lake Analytics account resource group name (if different from
      Data Factory account). Type: string (or Expression with resultType string).
-    :type resource_group_name: object
+    :type resource_group_name: any
     :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type: string (or Expression
      with resultType string).
-    :type data_lake_analytics_uri: object
+    :type data_lake_analytics_uri: any
     :param encrypted_credential: The encrypted credential used for authentication. Credentials are
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
-    :type encrypted_credential: object
+    :type encrypted_credential: any
     """

     _validation = {
@@ -4479,19 +4674,19 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService):
     def __init__(
         self,
         *,
-        account_name: object,
-        tenant: object,
-        additional_properties: Optional[Dict[str, object]] = None,
+        account_name: Any,
+        tenant: Any,
+        additional_properties: Optional[Dict[str, Any]] = None,
         connect_via: Optional["IntegrationRuntimeReference"] = None,
         description: Optional[str] = None,
         parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
-        annotations: Optional[List[object]] = None,
-        service_principal_id: Optional[object] = None,
+        annotations: Optional[List[Any]] = None,
+        service_principal_id: Optional[Any] = None,
         service_principal_key: Optional["SecretBase"] = None,
-        subscription_id: Optional[object] = None,
-        resource_group_name: Optional[object] = None,
-        data_lake_analytics_uri: Optional[object] = None,
-        encrypted_credential: Optional[object] = None,
+        subscription_id: Optional[Any] = None,
+        resource_group_name: Optional[Any] = None,
+        data_lake_analytics_uri: Optional[Any] = None,
+        encrypted_credential: Optional[Any] = None,
         **kwargs
     ):
         super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -4513,32 +4708,32 @@ class AzureDataLakeStoreDataset(Dataset):
     :param additional_properties: Unmatched properties from the message are deserialized to this
      collection.
-    :type additional_properties: dict[str, object]
+    :type additional_properties: dict[str, any]
     :param type: Required. Type of dataset.Constant filled by server.
     :type type: str
     :param description: Dataset description.
     :type description: str
     :param structure: Columns that define the structure of the dataset. Type: array (or Expression
      with resultType array), itemType: DatasetDataElement.
-    :type structure: object
+    :type structure: any
     :param schema: Columns that define the physical type schema of the dataset. Type: array (or
      Expression with resultType array), itemType: DatasetSchemaDataElement.
- :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param format: The format of the Data Lake Store. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the item(s) in the Azure Data Lake @@ -4571,15 +4766,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, format: Optional["DatasetStorageFormat"] = None, compression: Optional["DatasetCompression"] = None, **kwargs @@ -4599,7 +4794,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -4609,36 +4804,36 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression with resultType string). - :type data_lake_store_uri: object + :type data_lake_store_uri: any :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Store account. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. 
Type: string (or Expression with resultType string).
-    :type tenant: object
+    :type tenant: any
     :param azure_cloud_type: Indicates the azure cloud type of the service principal auth. Allowed
      values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data
      factory regions’ cloud type. Type: string (or Expression with resultType string).
-    :type azure_cloud_type: object
+    :type azure_cloud_type: any
     :param account_name: Data Lake Store account name. Type: string (or Expression with resultType
      string).
-    :type account_name: object
+    :type account_name: any
     :param subscription_id: Data Lake Store account subscription ID (if different from Data
      Factory account). Type: string (or Expression with resultType string).
-    :type subscription_id: object
+    :type subscription_id: any
     :param resource_group_name: Data Lake Store account resource group name (if different from
      Data Factory account). Type: string (or Expression with resultType string).
-    :type resource_group_name: object
+    :type resource_group_name: any
     :param encrypted_credential: The encrypted credential used for authentication. Credentials are
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
-    :type encrypted_credential: object
+    :type encrypted_credential: any
     """

     _validation = {
@@ -4667,20 +4862,20 @@ class AzureDataLakeStoreLinkedService(LinkedService):
     def __init__(
         self,
         *,
-        data_lake_store_uri: object,
-        additional_properties: Optional[Dict[str, object]] = None,
+        data_lake_store_uri: Any,
+        additional_properties: Optional[Dict[str, Any]] = None,
         connect_via: Optional["IntegrationRuntimeReference"] = None,
         description: Optional[str] = None,
         parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
-        annotations: Optional[List[object]] = None,
-        service_principal_id: Optional[object] = None,
+        annotations: Optional[List[Any]] = None,
+        service_principal_id: Optional[Any] = None,
         service_principal_key: Optional["SecretBase"] = None,
-        tenant: Optional[object] = None,
-        azure_cloud_type: Optional[object] = None,
-        account_name: Optional[object] = None,
-        subscription_id: Optional[object] = None,
-        resource_group_name: Optional[object] = None,
-        encrypted_credential: Optional[object] = None,
+        tenant: Optional[Any] = None,
+        azure_cloud_type: Optional[Any] = None,
+        account_name: Optional[Any] = None,
+        subscription_id: Optional[Any] = None,
+        resource_group_name: Optional[Any] = None,
+        encrypted_credential: Optional[Any] = None,
         **kwargs
     ):
         super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -4703,15 +4898,15 @@ class AzureDataLakeStoreLocation(DatasetLocation):
     :param additional_properties: Unmatched properties from the message are deserialized to this
      collection.
-    :type additional_properties: dict[str, object]
+    :type additional_properties: dict[str, any]
     :param type: Required. Type of dataset storage location.Constant filled by server.
     :type type: str
     :param folder_path: Specify the folder path of dataset. Type: string (or Expression with
      resultType string).
-    :type folder_path: object
+    :type folder_path: any
     :param file_name: Specify the file name of dataset. Type: string (or Expression with
      resultType string).
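# --- Editorial aside (not part of the generated patch): a service-principal sketch
# for the AzureDataLakeStoreLinkedService constructor completed above. The URI,
# GUIDs, and key are hypothetical placeholders.
from azure.mgmt.datafactory.models import AzureDataLakeStoreLinkedService, SecureString

adls_ls = AzureDataLakeStoreLinkedService(
    data_lake_store_uri="adl://myadls.azuredatalakestore.net/webhdfs/v1",  # hypothetical URI
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=SecureString(value="<sp-key>"),
    tenant="00000000-0000-0000-0000-000000000000",
    azure_cloud_type="AzurePublic",  # one of the allowed values listed in the docstring
)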
-    :type file_name: object
+    :type file_name: any
     """

     _validation = {
@@ -4728,9 +4923,9 @@ class AzureDataLakeStoreLocation(DatasetLocation):
     def __init__(
         self,
         *,
-        additional_properties: Optional[Dict[str, object]] = None,
-        folder_path: Optional[object] = None,
-        file_name: Optional[object] = None,
+        additional_properties: Optional[Dict[str, Any]] = None,
+        folder_path: Optional[Any] = None,
+        file_name: Optional[Any] = None,
         **kwargs
     ):
         super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs)
@@ -4744,47 +4939,50 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
     :param additional_properties: Unmatched properties from the message are deserialized to this
      collection.
-    :type additional_properties: dict[str, object]
+    :type additional_properties: dict[str, any]
     :param type: Required. The read setting type.Constant filled by server.
     :type type: str
     :param max_concurrent_connections: The maximum concurrent connection count for the source data
      store. Type: integer (or Expression with resultType integer).
-    :type max_concurrent_connections: object
+    :type max_concurrent_connections: any
+    :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+     false. Type: boolean (or Expression with resultType boolean).
+    :type disable_metrics_collection: any
     :param recursive: If true, files under the folder path will be read recursively. Default is
      true. Type: boolean (or Expression with resultType boolean).
-    :type recursive: object
+    :type recursive: any
     :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with
      resultType string).
-    :type wildcard_folder_path: object
+    :type wildcard_folder_path: any
    :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType
      string).
-    :type wildcard_file_name: object
+    :type wildcard_file_name: any
     :param file_list_path: Point to a text file that lists each file (relative path to the path
      configured in the dataset) that you want to copy. Type: string (or Expression with resultType
      string).
-    :type file_list_path: object
+    :type file_list_path: any
     :param list_after: Lists files after the value (exclusive) based on file/folder names’
      lexicographical order. Applies under the folderPath in data set, and filters files/sub-folders
      under the folderPath. Type: string (or Expression with resultType string).
-    :type list_after: object
+    :type list_after: any
     :param list_before: Lists files before the value (inclusive) based on file/folder names’
      lexicographical order. Applies under the folderPath in data set, and filters files/sub-folders
      under the folderPath. Type: string (or Expression with resultType string).
-    :type list_before: object
+    :type list_before: any
     :param enable_partition_discovery: Indicates whether to enable partition discovery.
     :type enable_partition_discovery: bool
     :param partition_root_path: Specify the root path where partition discovery starts from. Type:
      string (or Expression with resultType string).
-    :type partition_root_path: object
+    :type partition_root_path: any
     :param delete_files_after_completion: Indicates whether the source files need to be deleted
      after copy completion. Default is false. Type: boolean (or Expression with resultType
      boolean).
-    :type delete_files_after_completion: object
+    :type delete_files_after_completion: any
     :param modified_datetime_start: The start of file's modified datetime.
Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -4795,6 +4993,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4811,22 +5010,23 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - file_list_path: Optional[object] = None, - list_after: Optional[object] = None, - list_before: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + file_list_path: Optional[Any] = None, + list_after: Optional[Any] = None, + list_before: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, **kwargs ): - super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -4848,28 +5048,31 @@ class AzureDataLakeStoreSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. 
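# --- Editorial aside (not part of the generated patch): a sketch of the
# AzureDataLakeStoreReadSettings constructor completed above, combining wildcard
# matching with the modified-datetime window; paths and dates are illustrative.
from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

adls_read = AzureDataLakeStoreReadSettings(
    recursive=True,
    wildcard_folder_path="raw/2021/*",               # hypothetical folder pattern
    wildcard_file_name="*.parquet",                  # hypothetical file pattern
    modified_datetime_start="2021-01-01T00:00:00Z",
    modified_datetime_end="2021-02-01T00:00:00Z",
    disable_metrics_collection=False,                # new flag added by this patch
)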
Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param enable_adls_single_file_parallel: Single File Parallel. - :type enable_adls_single_file_parallel: object + :type enable_adls_single_file_parallel: any """ _validation = { @@ -4884,6 +5087,7 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } @@ -4891,17 +5095,18 @@ class AzureDataLakeStoreSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, - enable_adls_single_file_parallel: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, + enable_adls_single_file_parallel: Optional[Any] = None, **kwargs ): - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSink' # type: str self.copy_behavior = copy_behavior self.enable_adls_single_file_parallel = enable_adls_single_file_parallel @@ -4914,21 +5119,24 @@ class AzureDataLakeStoreSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any """ _validation = { @@ -4941,20 +5149,22 @@ class AzureDataLakeStoreSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, **kwargs ): - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSource' # type: str self.recursive = recursive @@ -4966,18 +5176,21 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). - :type expiry_date_time: object + :type expiry_date_time: any """ _validation = { @@ -4988,6 +5201,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } @@ -4995,13 +5209,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, - expiry_date_time: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, + expiry_date_time: Optional[Any] = None, **kwargs ): - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureDataLakeStoreWriteSettings' # type: str self.expiry_date_time = expiry_date_time @@ -5013,7 +5228,7 @@ class AzureFileStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -5023,34 +5238,34 @@ class AzureFileStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Host name of the server. Type: string (or Expression with resultType string). - :type host: object + :type host: any :param user_id: User ID to log on to the server. Type: string (or Expression with resultType string). - :type user_id: object + :type user_id: any :param password: Password to log on to the server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference.
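For the write settings above, a short sketch; note that expiryDateTime takes a UTC timestamp string such as "2018-12-01T05:00:00Z", and the copy-behavior value shown is illustrative:

    from azure.mgmt.datafactory.models import AzureDataLakeStoreWriteSettings

    write_settings = AzureDataLakeStoreWriteSettings(
        copy_behavior="PreserveHierarchy",        # illustrative copy-behavior value
        expiry_date_time="2018-12-01T05:00:00Z",  # UTC timestamp string, per the docstring
        disable_metrics_collection=False,
    )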
- :type connection_string: object + :type connection_string: any :param account_key: The Azure key vault secret reference of accountKey in connection string. :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object + :type sas_uri: any :param sas_token: The Azure key vault secret reference of sasToken in sas uri. :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param file_share: The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). - :type file_share: object + :type file_share: any :param snapshot: The azure file share snapshot version. Type: string (or Expression with resultType string). - :type snapshot: object + :type snapshot: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -5079,21 +5294,21 @@ class AzureFileStorageLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - host: Optional[object] = None, - user_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + host: Optional[Any] = None, + user_id: Optional[Any] = None, password: Optional["SecretBase"] = None, - connection_string: Optional[object] = None, + connection_string: Optional[Any] = None, account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, + sas_uri: Optional[Any] = None, sas_token: Optional["AzureKeyVaultSecretReference"] = None, - file_share: Optional[object] = None, - snapshot: Optional[object] = None, - encrypted_credential: Optional[object] = None, + file_share: Optional[Any] = None, + snapshot: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -5117,15 +5332,15 @@ class AzureFileStorageLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). 
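A sketch of the file-storage linked service defined above; the account, key, and share names are hypothetical placeholders, and connectionString is mutually exclusive with sasUri per the docstring:

    from azure.mgmt.datafactory.models import AzureFileStorageLinkedService

    linked_service = AzureFileStorageLinkedService(
        connection_string="DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<redacted>",
        file_share="myshare",  # required when authenticating with accountKey/sasToken
    )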
- :type file_name: object + :type file_name: any """ _validation = { @@ -5142,9 +5357,9 @@ class AzureFileStorageLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, **kwargs ): super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -5158,42 +5373,45 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -5204,6 +5422,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -5219,21 +5438,22 @@ class AzureFileStorageReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - prefix: Optional[object] = None, - file_list_path: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + prefix: Optional[Any] = None, + file_list_path: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, **kwargs ): - super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureFileStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -5254,14 +5474,17 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. 
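The read settings above can be exercised the same way; this sketch (paths and patterns are illustrative) enables partition discovery alongside the new metrics switch:

    from azure.mgmt.datafactory.models import AzureFileStorageReadSettings

    read_settings = AzureFileStorageReadSettings(
        recursive=True,
        wildcard_file_name="*.parquet",
        enable_partition_discovery=True,      # plain bool, not an expression, per the docstring
        partition_root_path="landing/sales",  # where partition discovery starts
        disable_metrics_collection=True,
    )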
- :type copy_behavior: object + :type copy_behavior: any """ _validation = { @@ -5272,18 +5495,20 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, **kwargs ): - super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureFileStorageWriteSettings' # type: str @@ -5294,7 +5519,7 @@ class AzureFunctionActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -5314,14 +5539,14 @@ class AzureFunctionActivity(ExecutionActivity): :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod :param function_name: Required. Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string). - :type function_name: object + :type function_name: any :param headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: object + :type headers: any :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method. Type: string (or Expression with resultType string).
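A sketch of the activity above; the activity and function names are hypothetical, and body is supplied because the method is POST:

    from azure.mgmt.datafactory.models import AzureFunctionActivity

    activity = AzureFunctionActivity(
        name="CallScoringFunction",
        method="POST",                     # str or AzureFunctionActivityMethod
        function_name="HttpTriggerScore",  # hypothetical Azure Function name
        body={"value": "@pipeline().parameters.payload", "type": "Expression"},
    )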
- :type body: object + :type body: any """ _validation = { @@ -5351,15 +5576,15 @@ def __init__( *, name: str, method: Union[str, "AzureFunctionActivityMethod"], - function_name: object, - additional_properties: Optional[Dict[str, object]] = None, + function_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - headers: Optional[object] = None, - body: Optional[object] = None, + headers: Optional[Any] = None, + body: Optional[Any] = None, **kwargs ): super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -5377,7 +5602,7 @@ class AzureFunctionLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -5387,16 +5612,16 @@ class AzureFunctionLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. - :type function_app_url: object + :type function_app_url: any :param function_key: Function or Host key for Azure Function App. :type function_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -5419,14 +5644,14 @@ class AzureFunctionLinkedService(LinkedService): def __init__( self, *, - function_app_url: object, - additional_properties: Optional[Dict[str, object]] = None, + function_app_url: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, function_key: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -5443,7 +5668,7 @@ class AzureKeyVaultLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required.
Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -5453,10 +5678,10 @@ class AzureKeyVaultLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param base_url: Required. The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :type base_url: object + :type base_url: any """ _validation = { @@ -5477,12 +5702,12 @@ class AzureKeyVaultLinkedService(LinkedService): def __init__( self, *, - base_url: object, - additional_properties: Optional[Dict[str, object]] = None, + base_url: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -5533,10 +5758,10 @@ class AzureKeyVaultSecretReference(SecretBase): :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). - :type secret_name: object + :type secret_name: any :param secret_version: The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). - :type secret_version: object + :type secret_version: any """ _validation = { @@ -5556,8 +5781,8 @@ def __init__( self, *, store: "LinkedServiceReference", - secret_name: object, - secret_version: Optional[object] = None, + secret_name: Any, + secret_version: Optional[Any] = None, **kwargs ): super(AzureKeyVaultSecretReference, self).__init__(**kwargs) @@ -5574,7 +5799,7 @@ class AzureMariaDBLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -5584,16 +5809,16 @@ class AzureMariaDBLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
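Since AzureKeyVaultSecretReference (defined above) recurs as the secret type throughout these models, a brief sketch; the vault reference name is hypothetical, and LinkedServiceReference is assumed to be constructible by reference name, as elsewhere in this package:

    from azure.mgmt.datafactory.models import AzureKeyVaultSecretReference, LinkedServiceReference

    secret = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="AzureKeyVault1"),  # hypothetical AKV linked service
        secret_name="storage-account-key",
        # secret_version omitted -> latest version of the secret, per the docstring
    )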
Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -5615,14 +5840,14 @@ class AzureMariaDBLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -5639,27 +5864,30 @@ class AzureMariaDBSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
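Tying the two together, a sketch of the MariaDB linked service above with its pwd sourced from Key Vault (server, database, and secret names are hypothetical):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        AzureMariaDBLinkedService,
        LinkedServiceReference,
    )

    linked_service = AzureMariaDBLinkedService(
        connection_string="Server=myserver.mariadb.database.azure.com;Port=3306;Database=mydb",
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name="AzureKeyVault1"),
            secret_name="mariadb-password",
        ),
    )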
- :type query: object + :type query: any """ _validation = { @@ -5672,6 +5900,7 @@ class AzureMariaDBSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -5680,16 +5909,17 @@ class AzureMariaDBSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = query @@ -5701,28 +5931,28 @@ class AzureMariaDBTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. 
Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -5747,14 +5977,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -5769,7 +5999,7 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -5788,7 +6018,7 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. - :type global_parameters: dict[str, object] + :type global_parameters: dict[str, any] :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution @@ -5824,13 +6054,13 @@ def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - global_parameters: Optional[Dict[str, object]] = None, + global_parameters: Optional[Dict[str, Any]] = None, web_service_outputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, web_service_inputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, **kwargs @@ -5849,7 +6079,7 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -5866,35 +6096,35 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). 
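A sketch of the batch-execution activity above; the activity name, container path, and blob linked service are hypothetical:

    from azure.mgmt.datafactory.models import (
        AzureMLBatchExecutionActivity,
        AzureMLWebServiceFile,
        LinkedServiceReference,
    )

    activity = AzureMLBatchExecutionActivity(
        name="ScoreBatch",
        global_parameters={"Threshold": 0.8},  # keys must match the published web service parameters
        web_service_inputs={
            "input1": AzureMLWebServiceFile(
                file_path="mycontainer/input.csv",  # container name plus relative path
                linked_service_name=LinkedServiceReference(reference_name="AzureBlobStorage1"),
            ),
        },
    )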
- :type ml_pipeline_id: object + :type ml_pipeline_id: any :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :type ml_pipeline_endpoint_id: object + :type ml_pipeline_endpoint_id: any :param version: Version of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). - :type version: object + :type version: any :param experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). - :type experiment_name: object + :type experiment_name: any :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :type ml_pipeline_parameters: object + :type ml_pipeline_parameters: any :param data_path_assignments: Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - :type data_path_assignments: object + :type data_path_assignments: any :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). - :type ml_parent_run_id: object + :type ml_parent_run_id: any :param continue_on_step_failure: Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). 
- :type continue_on_step_failure: object + :type continue_on_step_failure: any """ _validation = { @@ -5925,20 +6155,20 @@ def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - ml_pipeline_id: Optional[object] = None, - ml_pipeline_endpoint_id: Optional[object] = None, - version: Optional[object] = None, - experiment_name: Optional[object] = None, - ml_pipeline_parameters: Optional[object] = None, - data_path_assignments: Optional[object] = None, - ml_parent_run_id: Optional[object] = None, - continue_on_step_failure: Optional[object] = None, + ml_pipeline_id: Optional[Any] = None, + ml_pipeline_endpoint_id: Optional[Any] = None, + version: Optional[Any] = None, + experiment_name: Optional[Any] = None, + ml_pipeline_parameters: Optional[Any] = None, + data_path_assignments: Optional[Any] = None, + ml_parent_run_id: Optional[Any] = None, + continue_on_step_failure: Optional[Any] = None, **kwargs ): super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -5960,7 +6190,7 @@ class AzureMLLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -5970,29 +6200,29 @@ class AzureMLLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object + :type ml_endpoint: any :param api_key: Required. The API key for accessing the Azure ML model endpoint. :type api_key: ~azure.mgmt.datafactory.models.SecretBase :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). - :type update_resource_endpoint: object + :type update_resource_endpoint: any :param service_principal_id: The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
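For the execute-pipeline activity above, a minimal sketch (the pipeline id and experiment name are hypothetical placeholders):

    from azure.mgmt.datafactory.models import AzureMLExecutePipelineActivity

    activity = AzureMLExecutePipelineActivity(
        name="RunScoringPipeline",
        ml_pipeline_id="00000000-0000-0000-0000-000000000000",  # hypothetical published pipeline id
        experiment_name="nightly-scoring",
        ml_pipeline_parameters={"learning_rate": "0.01"},       # keys must match published pipeline parameters
        continue_on_step_failure=False,
    )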
- :type tenant: object + :type tenant: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -6020,18 +6250,18 @@ class AzureMLLinkedService(LinkedService): def __init__( self, *, - ml_endpoint: object, + ml_endpoint: Any, api_key: "SecretBase", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - update_resource_endpoint: Optional[object] = None, - service_principal_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + update_resource_endpoint: Optional[Any] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + tenant: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -6052,7 +6282,7 @@ class AzureMLServiceLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -6062,30 +6292,30 @@ class AzureMLServiceLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). - :type subscription_id: object + :type subscription_id: any :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). - :type resource_group_name: object + :type resource_group_name: any :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or Expression with resultType string). - :type ml_workspace_name: object + :type ml_workspace_name: any :param service_principal_id: The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
- :type tenant: object + :type tenant: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -6114,18 +6344,18 @@ class AzureMLServiceLinkedService(LinkedService): def __init__( self, *, - subscription_id: object, - resource_group_name: object, - ml_workspace_name: object, - additional_properties: Optional[Dict[str, object]] = None, + subscription_id: Any, + resource_group_name: Any, + ml_workspace_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - service_principal_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + tenant: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureMLServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -6146,7 +6376,7 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -6163,14 +6393,14 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param trained_model_name: Required. Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). - :type trained_model_name: object + :type trained_model_name: any :param trained_model_linked_service_name: Required. Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. :type trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). - :type trained_model_file_path: object + :type trained_model_file_path: any """ _validation = { @@ -6199,10 +6429,10 @@ def __init__( self, *, name: str, - trained_model_name: object, + trained_model_name: Any, trained_model_linked_service_name: "LinkedServiceReference", - trained_model_file_path: object, - additional_properties: Optional[Dict[str, object]] = None, + trained_model_file_path: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -6224,7 +6454,7 @@ class AzureMLWebServiceFile(msrest.serialization.Model): :param file_path: Required. 
The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :type file_path: object + :type file_path: any :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference @@ -6243,7 +6473,7 @@ class AzureMLWebServiceFile(msrest.serialization.Model): def __init__( self, *, - file_path: object, + file_path: Any, linked_service_name: "LinkedServiceReference", **kwargs ): @@ -6259,7 +6489,7 @@ class AzureMySqlLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -6269,16 +6499,16 @@ class AzureMySqlLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -6301,14 +6531,14 @@ class AzureMySqlLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -6325,27 +6555,30 @@ class AzureMySqlSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -6360,22 +6593,24 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, **kwargs ): - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -6387,26 +6622,29 @@ class AzureMySqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
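A sketch of the MySQL sink above, combining the pre-copy script with the new metrics switch (the staging table name is hypothetical):

    from azure.mgmt.datafactory.models import AzureMySqlSink

    sink = AzureMySqlSink(
        pre_copy_script="TRUNCATE TABLE staging_orders",  # runs before the copy starts
        write_batch_size=1000,
        disable_metrics_collection=True,
    )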
- :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -6419,6 +6657,7 @@ class AzureMySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -6427,16 +6666,17 @@ class AzureMySqlSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMySqlSource' # type: str self.query = query @@ -6448,32 +6688,32 @@ class AzureMySqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any :param table: The name of Azure MySQL database table. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -6499,15 +6739,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -6523,7 +6763,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -6533,16 +6773,16 @@ class AzurePostgreSqlLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
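AzureMySqlTableDataset keeps both the legacy table_name property and the newer table property. A hedged construction sketch; the linked-service name is hypothetical, and the LinkedServiceReference constructor shown follows this package's track-2 convention:

    from azure.mgmt.datafactory.models import AzureMySqlTableDataset, LinkedServiceReference

    ds = AzureMySqlTableDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="AzureMySqlLS"),  # hypothetical name
        table="orders",  # prefer `table` over the older `table_name`
    )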
- :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -6564,14 +6804,14 @@ class AzurePostgreSqlLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -6588,27 +6828,30 @@ class AzurePostgreSqlSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
- :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -6623,22 +6866,24 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, **kwargs ): - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -6650,27 +6895,30 @@ class AzurePostgreSqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -6683,6 +6931,7 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -6691,16 +6940,17 @@ class AzurePostgreSqlSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = query @@ -6712,35 +6962,35 @@ class AzurePostgreSqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -6767,16 +7017,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -6793,24 +7043,27 @@ class AzureQueueSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: any """ _validation = { @@ -6825,20 +7078,22 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, **kwargs ): - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureQueueSink' # type: str @@ -6849,29 +7104,29 @@ class AzureSearchIndexDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression with resultType string). 
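Sinks and sources such as the ones above are not used standalone; they plug into a copy activity. A sketch under the assumption that CopyActivity and DatasetReference keep their usual shape in this regeneration (all reference names hypothetical):

    from azure.mgmt.datafactory.models import (
        AzureMySqlSource, AzureQueueSink, CopyActivity, DatasetReference)

    copy = CopyActivity(
        name="CopyOrdersToQueue",
        inputs=[DatasetReference(type="DatasetReference", reference_name="OrdersDataset")],
        outputs=[DatasetReference(type="DatasetReference", reference_name="QueueDataset")],
        source=AzureMySqlSource(query="SELECT id FROM orders"),
        sink=AzureQueueSink(disable_metrics_collection=True),  # new flag applies here too
    )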
- :type index_name: object + :type index_name: any """ _validation = { @@ -6897,13 +7152,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - index_name: object, - additional_properties: Optional[Dict[str, object]] = None, + index_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -6919,24 +7174,27 @@ class AzureSearchIndexSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". 
:type write_behavior: str or ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType @@ -6954,22 +7212,24 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, write_behavior: Optional[Union[str, "AzureSearchIndexWriteBehaviorType"]] = None, **kwargs ): - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSearchIndexSink' # type: str self.write_behavior = write_behavior @@ -6981,7 +7241,7 @@ class AzureSearchLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -6991,16 +7251,16 @@ class AzureSearchLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param key: Admin Key for Azure Search service. :type key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
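write_behavior is one of the few strongly typed sink properties: it accepts the AzureSearchIndexWriteBehaviorType enum or its string value rather than `Any`. A small sketch; the enum member casing follows the generator's usual convention and is an assumption here:

    from azure.mgmt.datafactory.models import (
        AzureSearchIndexSink, AzureSearchIndexWriteBehaviorType)

    sink = AzureSearchIndexSink(write_behavior=AzureSearchIndexWriteBehaviorType.MERGE)
    # The raw string value is accepted as well:
    sink = AzureSearchIndexSink(write_behavior="Merge")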
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -7023,14 +7283,14 @@ class AzureSearchLinkedService(LinkedService): def __init__( self, *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, + url: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, key: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7047,7 +7307,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -7057,29 +7317,31 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Database. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any + :param always_encrypted_settings: Sql always encrypted properties. 
+ :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -7101,23 +7363,25 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, password: Optional["AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[object] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - azure_cloud_type: Optional[object] = None, - encrypted_credential: Optional[object] = None, + tenant: Optional[Any] = None, + azure_cloud_type: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, + always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, **kwargs ): super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7129,6 +7393,7 @@ def __init__( self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.always_encrypted_settings = always_encrypted_settings class AzureSqlDWLinkedService(LinkedService): @@ -7138,7 +7403,7 @@ class AzureSqlDWLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -7148,29 +7413,29 @@ class AzureSqlDWLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Data Warehouse. 
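always_encrypted_settings is the new hook this PR adds to the Azure SQL linked services. SqlAlwaysEncryptedProperties is defined elsewhere in this patch; the constructor arguments below (an AKV auth type plus optional service-principal credentials) are an assumption based on the swagger it is generated from:

    from azure.mgmt.datafactory.models import (
        AzureSqlDatabaseLinkedService, SecureString, SqlAlwaysEncryptedProperties)

    ls = AzureSqlDatabaseLinkedService(
        connection_string="Server=tcp:myserver.database.windows.net;Database=mydb;",  # hypothetical
        always_encrypted_settings=SqlAlwaysEncryptedProperties(
            always_encrypted_akv_auth_type="ServicePrincipal",     # assumed enum value
            service_principal_id="<app-id>",
            service_principal_key=SecureString(value="<secret>"),  # any SecretBase works
        ),
    )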
:type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -7197,18 +7462,18 @@ class AzureSqlDWLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, password: Optional["AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[object] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - azure_cloud_type: Optional[object] = None, - encrypted_credential: Optional[object] = None, + tenant: Optional[Any] = None, + azure_cloud_type: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7229,35 +7494,35 @@ class AzureSqlDWTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. 
Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -7284,16 +7549,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + table_name: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -7310,7 +7575,7 @@ class AzureSqlMILinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -7320,29 +7585,31 @@ class AzureSqlMILinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param service_principal_id: The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key of the service principal used to authenticate against Azure SQL Managed Instance. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. 
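Several SQL-family datasets carry the same deprecation: table_name "will be retired" in favour of the split schema + table pair. A sketch against AzureSqlDWTableDataset, using only parameters declared above (reference name hypothetical):

    from azure.mgmt.datafactory.models import AzureSqlDWTableDataset, LinkedServiceReference

    ds = AzureSqlDWTableDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="AzureSqlDWLS"),
        schema_type_properties_schema="dbo",  # surfaces as typeProperties.schema on the wire
        table="FactSales",
        # table_name is the retired single-property form; leave it unset going forward.
    )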
Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -7364,23 +7631,25 @@ class AzureSqlMILinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, password: Optional["AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[object] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - azure_cloud_type: Optional[object] = None, - encrypted_credential: Optional[object] = None, + tenant: Optional[Any] = None, + azure_cloud_type: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, + always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, **kwargs ): super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -7392,6 +7661,7 @@ def __init__( self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.always_encrypted_settings = always_encrypted_settings class AzureSqlMITableDataset(Dataset): @@ -7401,35 +7671,35 @@ class AzureSqlMITableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -7456,16 +7726,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + table_name: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -7482,42 +7752,45 @@ class AzureSqlSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object + :type sql_writer_stored_procedure_name: any :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type sql_writer_table_type: object + :type sql_writer_table_type: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object + :type stored_procedure_table_type_parameter_name: any :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object + :type table_option: any """ _validation = { @@ -7532,6 +7805,7 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -7543,21 +7817,22 @@ class AzureSqlSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - sql_writer_stored_procedure_name: Optional[object] = None, - sql_writer_table_type: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + sql_writer_stored_procedure_name: Optional[Any] = None, + sql_writer_table_type: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, - stored_procedure_table_type_parameter_name: Optional[object] = None, - table_option: Optional[object] = None, + stored_procedure_table_type_parameter_name: Optional[Any] = None, + table_option: Optional[Any] = None, **kwargs ): - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + 
super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -7574,39 +7849,42 @@ class AzureSqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object + :type produce_additional_types: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
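AzureSqlSink's stored-procedure path shows how the `Any`-typed properties combine with the strongly typed StoredProcedureParameter map. A sketch using the parameters exactly as declared above (procedure and table-type names hypothetical):

    from azure.mgmt.datafactory.models import AzureSqlSink, StoredProcedureParameter

    sink = AzureSqlSink(
        sql_writer_stored_procedure_name="spOverwriteOrders",
        sql_writer_table_type="OrdersType",
        stored_procedure_table_type_parameter_name="Orders",
        stored_procedure_parameters={
            "BatchId": StoredProcedureParameter(value="1", type="Int"),
        },
        table_option="autoCreate",  # per the docstring, only 'autoCreate' is supported
        disable_metrics_collection=False,
    )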
:type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ @@ -7621,6 +7899,7 @@ class AzureSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -7634,21 +7913,22 @@ class AzureSqlSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - sql_reader_query: Optional[object] = None, - sql_reader_stored_procedure_name: Optional[object] = None, + sql_reader_query: Optional[Any] = None, + sql_reader_stored_procedure_name: Optional[Any] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, - produce_additional_types: Optional[object] = None, - partition_option: Optional[object] = None, + produce_additional_types: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureSqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -7665,35 +7945,35 @@ class AzureSqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. 
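For parallel reads, AzureSqlSource pairs partition_option with SqlPartitionSettings. The settings class is defined elsewhere in this file; the field names below are assumptions consistent with its docstring:

    from azure.mgmt.datafactory.models import AzureSqlSource, SqlPartitionSettings

    source = AzureSqlSource(
        sql_reader_query="SELECT * FROM FactSales",
        partition_option="DynamicRange",  # "None" | "PhysicalPartitionsOfTable" | "DynamicRange"
        partition_settings=SqlPartitionSettings(
            partition_column_name="SaleId",   # assumed field names
            partition_lower_bound="1",
            partition_upper_bound="1000000",
        ),
    )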
Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the Azure SQL database. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -7720,16 +8000,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + table_name: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -7746,7 +8026,7 @@ class AzureStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -7756,15 +8036,15 @@ class AzureStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param account_key: The Azure key vault secret reference of accountKey in connection string. :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. 
- :type sas_uri: object + :type sas_uri: any :param sas_token: The Azure key vault secret reference of sasToken in sas uri. :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -7794,14 +8074,14 @@ class AzureStorageLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, + sas_uri: Optional[Any] = None, sas_token: Optional["AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, **kwargs @@ -7822,29 +8102,29 @@ class AzureTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. The table name of the Azure Table storage. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -7870,13 +8150,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - table_name: object, - additional_properties: Optional[Dict[str, object]] = None, + table_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -7892,36 +8172,39 @@ class AzureTableSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
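AzureStorageLinkedService only picks up the `Any` migration; `connection_string` and `sas_uri` remain mutually exclusive, and either may now be a raw string, a SecureString, or an AzureKeyVaultSecretReference since the annotation is `Any`. A minimal sketch with placeholder account values:

```python
from azure.mgmt.datafactory.models import AzureStorageLinkedService

# connection_string and sas_uri are mutually exclusive; supply exactly one.
linked_service = AzureStorageLinkedService(
    connection_string=(
        "DefaultEndpointsProtocol=https;"
        "AccountName=<account>;AccountKey=<key>"   # placeholders
    ),
)
```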
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). - :type azure_table_default_partition_key_value: object + :type azure_table_default_partition_key_value: any :param azure_table_partition_key_name: Azure Table partition key name. Type: string (or Expression with resultType string). - :type azure_table_partition_key_name: object + :type azure_table_partition_key_name: any :param azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with resultType string). - :type azure_table_row_key_name: object + :type azure_table_row_key_name: any :param azure_table_insert_type: Azure Table insert type. Type: string (or Expression with resultType string). 
- :type azure_table_insert_type: object + :type azure_table_insert_type: any """ _validation = { @@ -7936,6 +8219,7 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, @@ -7945,19 +8229,20 @@ class AzureTableSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - azure_table_default_partition_key_value: Optional[object] = None, - azure_table_partition_key_name: Optional[object] = None, - azure_table_row_key_name: Optional[object] = None, - azure_table_insert_type: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + azure_table_default_partition_key_value: Optional[Any] = None, + azure_table_partition_key_name: Optional[Any] = None, + azure_table_row_key_name: Optional[Any] = None, + azure_table_insert_type: Optional[Any] = None, **kwargs ): - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureTableSink' # type: str self.azure_table_default_partition_key_value = azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name @@ -7972,30 +8257,33 @@ class AzureTableSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
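AzureTableSink gains `disable_metrics_collection` in the same four places as the other sinks in this patch. For instance, with placeholder key-column names:

```python
from azure.mgmt.datafactory.models import AzureTableSink

sink = AzureTableSink(
    azure_table_partition_key_name="PartitionKey",  # placeholder column names
    azure_table_row_key_name="RowKey",
    azure_table_insert_type="merge",                # service accepts e.g. merge/replace
    disable_metrics_collection=False,
)
```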
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). - :type azure_table_source_query: object + :type azure_table_source_query: any :param azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). - :type azure_table_source_ignore_table_not_found: object + :type azure_table_source_ignore_table_not_found: any """ _validation = { @@ -8008,6 +8296,7 @@ class AzureTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, @@ -8017,17 +8306,18 @@ class AzureTableSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - azure_table_source_query: Optional[object] = None, - azure_table_source_ignore_table_not_found: Optional[object] = None, + azure_table_source_query: Optional[Any] = None, + azure_table_source_ignore_table_not_found: Optional[Any] = None, **kwargs ): - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, 
query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureTableSource' # type: str self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found @@ -8040,7 +8330,7 @@ class AzureTableStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -8050,15 +8340,15 @@ class AzureTableStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param account_key: The Azure key vault secret reference of accountKey in connection string. :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object + :type sas_uri: any :param sas_token: The Azure key vault secret reference of sasToken in sas uri. :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -8088,14 +8378,14 @@ class AzureTableStorageLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, + sas_uri: Optional[Any] = None, sas_token: Optional["AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, **kwargs @@ -8116,23 +8406,23 @@ class BinaryDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. 
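The AzureTableSource and AzureTableStorageLinkedService hunks apply the same mechanical changes. A sketch of the regenerated source, with a placeholder OData-style filter as the query:

```python
from azure.mgmt.datafactory.models import AzureTableSource

source = AzureTableSource(
    azure_table_source_query="PartitionKey eq 'sensors'",  # placeholder filter
    azure_table_source_ignore_table_not_found=True,
    disable_metrics_collection=True,
)
```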
Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -8165,12 +8455,12 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, compression: Optional["DatasetCompression"] = None, @@ -8192,7 +8482,7 @@ class FormatReadSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str """ @@ -8213,7 +8503,7 @@ class FormatReadSettings(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(FormatReadSettings, self).__init__(**kwargs) @@ -8228,7 +8518,7 @@ class BinaryReadSettings(FormatReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. @@ -8248,7 +8538,7 @@ class BinaryReadSettings(FormatReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, compression_properties: Optional["CompressionReadSettings"] = None, **kwargs ): @@ -8264,24 +8554,27 @@ class BinarySink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Binary store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ @@ -8298,22 +8591,24 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySink' # type: str self.store_settings = store_settings @@ -8325,18 +8620,21 @@ class BinarySource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
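BinarySink follows suit; beyond the common sink knobs its only payload is `store_settings`. A sketch assuming AzureBlobStorageWriteSettings (one of the generated StoreWriteSettings subtypes elsewhere in this module) as the target store:

```python
from azure.mgmt.datafactory.models import (
    AzureBlobStorageWriteSettings,
    BinarySink,
)

sink = BinarySink(
    store_settings=AzureBlobStorageWriteSettings(copy_behavior="PreserveHierarchy"),
    disable_metrics_collection=False,
)
```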
- :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Binary store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Binary format settings. @@ -8353,6 +8651,7 @@ class BinarySource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } @@ -8360,15 +8659,16 @@ class BinarySource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["BinaryReadSettings"] = None, **kwargs ): - super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -8386,7 +8686,7 @@ class Trigger(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -8395,7 +8695,7 @@ class Trigger(msrest.serialization.Model): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. 
- :type annotations: list[object] + :type annotations: list[any] """ _validation = { @@ -8418,9 +8718,9 @@ class Trigger(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(Trigger, self).__init__(**kwargs) @@ -8443,7 +8743,7 @@ class MultiplePipelineTrigger(Trigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -8452,7 +8752,7 @@ class MultiplePipelineTrigger(Trigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] """ @@ -8478,9 +8778,9 @@ class MultiplePipelineTrigger(Trigger): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): @@ -8498,7 +8798,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -8507,7 +8807,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to @@ -8522,7 +8822,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. :type ignore_empty_blobs: bool :param events: Required. The type of events that cause this trigger to fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypesEnum] + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] :param scope: Required. The ARM resource ID of the Storage Account. 
:type scope: str """ @@ -8551,11 +8851,11 @@ class BlobEventsTrigger(MultiplePipelineTrigger): def __init__( self, *, - events: List[Union[str, "BlobEventTypesEnum"]], + events: List[Union[str, "BlobEventTypes"]], scope: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, pipelines: Optional[List["TriggerPipelineReference"]] = None, blob_path_begins_with: Optional[str] = None, blob_path_ends_with: Optional[str] = None, @@ -8578,35 +8878,41 @@ class BlobSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). - :type blob_writer_overwrite_files: object + :type blob_writer_overwrite_files: any :param blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression with resultType string). - :type blob_writer_date_time_format: object + :type blob_writer_date_time_format: any :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). - :type blob_writer_add_header: object + :type blob_writer_add_header: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
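One rename worth noting in the BlobEventsTrigger hunk above: the `events` list is now annotated against `BlobEventTypes` instead of `BlobEventTypesEnum`. Because the annotation is `Union[str, BlobEventTypes]`, the raw event-type string still works. A sketch with placeholder scope and paths:

```python
from azure.mgmt.datafactory.models import BlobEventsTrigger

trigger = BlobEventsTrigger(
    events=["Microsoft.Storage.BlobCreated"],  # or a BlobEventTypes member
    scope=(
        "/subscriptions/<sub-id>/resourceGroups/<rg>"
        "/providers/Microsoft.Storage/storageAccounts/<account>"  # placeholder
    ),
    blob_path_begins_with="/container/blobs/",
    ignore_empty_blobs=True,
)
```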
+ :type metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -8621,33 +8927,38 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - blob_writer_overwrite_files: Optional[object] = None, - blob_writer_date_time_format: Optional[object] = None, - blob_writer_add_header: Optional[object] = None, - copy_behavior: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + blob_writer_overwrite_files: Optional[Any] = None, + blob_writer_date_time_format: Optional[Any] = None, + blob_writer_add_header: Optional[Any] = None, + copy_behavior: Optional[Any] = None, + metadata: Optional[List["MetadataItem"]] = None, **kwargs ): - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSink' # type: str self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header self.copy_behavior = copy_behavior + self.metadata = metadata class BlobSource(CopySource): @@ -8657,27 +8968,30 @@ class BlobSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
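BlobSink is the first sink in this section to gain a second new property, `metadata`, alongside `disable_metrics_collection`. A sketch, assuming `MetadataItem` exposes `name` and `value` keyword arguments as the `[MetadataItem]` attribute map implies; the name/value pair is a placeholder:

```python
from azure.mgmt.datafactory.models import BlobSink, MetadataItem

sink = BlobSink(
    blob_writer_overwrite_files=True,
    copy_behavior="PreserveHierarchy",
    # New in this regeneration: custom metadata stamped onto sink blobs.
    metadata=[MetadataItem(name="ingestedBy", value="adf-copy")],  # placeholders
    disable_metrics_collection=False,
)
```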
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). - :type treat_empty_as_null: object + :type treat_empty_as_null: any :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object + :type skip_header_line_count: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any """ _validation = { @@ -8690,6 +9004,7 @@ class BlobSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -8698,16 +9013,17 @@ class BlobSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - treat_empty_as_null: Optional[object] = None, - skip_header_line_count: Optional[object] = None, - recursive: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + treat_empty_as_null: Optional[Any] = None, + skip_header_line_count: Optional[Any] = None, + recursive: Optional[Any] = None, **kwargs ): - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count @@ -8723,7 +9039,7 @@ class BlobTrigger(MultiplePipelineTrigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. 
@@ -8732,7 +9048,7 @@ class BlobTrigger(MultiplePipelineTrigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param folder_path: Required. The path of the container/folder that will trigger the pipeline. @@ -8770,9 +9086,9 @@ def __init__( folder_path: str, max_concurrency: int, linked_service: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): @@ -8790,7 +9106,7 @@ class CassandraLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -8800,25 +9116,25 @@ class CassandraLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. Host name for connection. Type: string (or Expression with resultType string). - :type host: object + :type host: any :param authentication_type: AuthenticationType to be used for connection. Type: string (or Expression with resultType string). - :type authentication_type: object + :type authentication_type: any :param port: The port for the connection. Type: integer (or Expression with resultType integer). - :type port: object + :type port: any :param username: Username for authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -8844,17 +9160,17 @@ class CassandraLinkedService(LinkedService): def __init__( self, *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, + host: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - port: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + authentication_type: Optional[Any] = None, + port: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -8874,27 +9190,30 @@ class CassandraSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param consistency_level: The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. 
Must be one of @@ -8915,6 +9234,7 @@ class CassandraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -8924,17 +9244,18 @@ class CassandraSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None, **kwargs ): - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CassandraSource' # type: str self.query = query self.consistency_level = consistency_level @@ -8947,32 +9268,32 @@ class CassandraTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
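CassandraSource picks up the same source-side changes; `consistency_level` still accepts either a `CassandraSourceReadConsistencyLevels` member or its string value. A sketch with a placeholder CQL query:

```python
from azure.mgmt.datafactory.models import CassandraSource

source = CassandraSource(
    query="SELECT * FROM ks.readings",  # placeholder CQL
    consistency_level="LOCAL_QUORUM",
    disable_metrics_collection=False,
)
```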
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with resultType string). - :type keyspace: object + :type keyspace: any """ _validation = { @@ -8998,15 +9319,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - keyspace: Optional[object] = None, + table_name: Optional[Any] = None, + keyspace: Optional[Any] = None, **kwargs ): super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -9024,7 +9345,7 @@ class ChainingTrigger(Trigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -9033,7 +9354,7 @@ class ChainingTrigger(Trigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines complete successfully. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference @@ -9069,9 +9390,9 @@ def __init__( pipeline: "TriggerPipelineReference", depends_on: List["PipelineReference"], run_dimension: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) @@ -9132,9 +9453,9 @@ class CmdkeySetup(CustomSetupBase): :param type: Required. The type of custom setup.Constant filled by server. :type type: str :param target_name: Required. The server name of data source access. - :type target_name: object + :type target_name: any :param user_name: Required. The user name of data source access. - :type user_name: object + :type user_name: any :param password: Required. The password of data source access. 
:type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -9156,8 +9477,8 @@ class CmdkeySetup(CustomSetupBase): def __init__( self, *, - target_name: object, - user_name: object, + target_name: Any, + user_name: Any, password: "SecretBase", **kwargs ): @@ -9197,29 +9518,29 @@ class CommonDataServiceForAppsEntityDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: object + :type entity_name: any """ _validation = { @@ -9244,14 +9565,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - entity_name: Optional[object] = None, + entity_name: Optional[Any] = None, **kwargs ): super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -9266,7 +9587,7 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -9276,49 +9597,46 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
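CmdkeySetup's required `target_name` and `user_name` likewise switch from `object` to `Any`. A sketch in which every value is a placeholder:

```python
from azure.mgmt.datafactory.models import CmdkeySetup, SecureString

setup = CmdkeySetup(
    target_name="contoso.database.windows.net",  # placeholder server
    user_name="sqladmin",                        # placeholder user
    password=SecureString(value="<secret>"),     # placeholder secret
)
```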
- :type annotations: list[object] + :type annotations: list[any] :param deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType + string). + :type deployment_type: any :param host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: object + :type host_name: any :param port: The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object + :type service_uri: any :param organization_name: The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object + :type organization_name: any :param authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: any :param username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password to access the Common Data Service for Apps instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: any :param service_principal_credential: The credential of the service principal object in Azure Active Directory. 
If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -9328,7 +9646,7 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -9344,16 +9662,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -9361,23 +9679,23 @@ class CommonDataServiceForAppsLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + deployment_type: Any, + authentication_type: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - host_name: Optional[object] = None, - port: Optional[object] = None, - service_uri: Optional[object] = None, - organization_name: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + host_name: Optional[Any] = None, + port: Optional[Any] = None, + service_uri: Optional[Any] = None, + organization_name: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_id: Optional[Any] = None, + service_principal_credential_type: Optional[Any] = None, service_principal_credential: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): 
super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -9403,34 +9721,37 @@ class CommonDataServiceForAppsSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object + :type ignore_null_values: any :param alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). 
- :type alternate_key_name: object + :type alternate_key_name: any """ _validation = { @@ -9446,6 +9767,7 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -9455,17 +9777,18 @@ def __init__( self, *, write_behavior: Union[str, "DynamicsSinkWriteBehavior"], - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - ignore_null_values: Optional[object] = None, - alternate_key_name: Optional[object] = None, - **kwargs - ): - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + ignore_null_values: Optional[Any] = None, + alternate_key_name: Optional[Any] = None, + **kwargs + ): + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -9479,21 +9802,24 @@ class CommonDataServiceForAppsSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -9509,6 +9835,7 @@ class CommonDataServiceForAppsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -9516,15 +9843,16 @@ class CommonDataServiceForAppsSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -9577,7 +9905,7 @@ class CompressionReadSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The Compression setting type.Constant filled by server. :type type: str """ @@ -9598,7 +9926,7 @@ class CompressionReadSettings(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(CompressionReadSettings, self).__init__(**kwargs) @@ -9613,7 +9941,7 @@ class ConcurLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
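The hunks above and below follow one mechanical pattern: `object` annotations become `typing.Any` (`any` in docstrings), and the CopySink/CopySource subclasses touched here each gain an optional `disable_metrics_collection` property forwarded to the base class. A minimal caller-side sketch of the regenerated model — assuming this PR's `azure.mgmt.datafactory.models` is importable; the literal values are illustrative only:

    from azure.mgmt.datafactory.models import CommonDataServiceForAppsSink

    # write_behavior stays a required str/enum value ("Upsert"); the new
    # disable_metrics_collection parameter is Optional[Any] because it may
    # carry either a bool or an ADF expression object.
    sink = CommonDataServiceForAppsSink(
        write_behavior="Upsert",
        ignore_null_values=True,
        disable_metrics_collection=False,
    )

The generated __init__ passes disable_metrics_collection through super() to CopySink, so it serializes under the 'disableMetricsCollection' key shown in the _attribute_map.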
@@ -9623,31 +9951,31 @@ class ConcurLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param client_id: Required. Application client_id supplied by Concur App Management. - :type client_id: object + :type client_id: any :param username: Required. The user name that you use to access Concur Service. - :type username: object + :type username: any :param password: The password corresponding to the user name that you provided in the username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -9676,19 +10004,19 @@ class ConcurLinkedService(LinkedService): def __init__( self, *, - client_id: object, - username: object, - additional_properties: Optional[Dict[str, object]] = None, + client_id: Any, + username: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_properties: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_properties: Optional[Any] = None, password: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -9710,28 +10038,28 @@ class ConcurObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. 
Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -9756,14 +10084,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -9778,27 +10106,30 @@ class ConcurSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -9811,6 +10142,7 @@ class ConcurSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -9819,16 +10151,17 @@ class ConcurSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ConcurSource' # type: str self.query = query @@ -9868,54 +10201,6 @@ def __init__( self.status = None -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - } - - def __init__( - self, - *, - name: str, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - **kwargs - ): - super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Container' # type: str - - class CopyActivity(ExecutionActivity): """Copy activity. @@ -9923,7 +10208,7 @@ class CopyActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -9947,22 +10232,22 @@ class CopyActivity(ExecutionActivity): :param sink: Required. Copy activity sink. :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: object + :type translator: any :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_staging: object + :type enable_staging: any :param staging_settings: Specifies interim staging settings when EnableStaging is true. :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. - :type parallel_copies: object + :type parallel_copies: any :param data_integration_units: Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. - :type data_integration_units: object + :type data_integration_units: any :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_skip_incompatible_row: object + :type enable_skip_incompatible_row: any :param redirect_incompatible_row_settings: Redirect incompatible row settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: @@ -9973,12 +10258,12 @@ class CopyActivity(ExecutionActivity): :param log_settings: Log settings customer needs provide when enabling log. :type log_settings: ~azure.mgmt.datafactory.models.LogSettings :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] + :type preserve_rules: list[any] :param preserve: Preserve rules. 
- :type preserve: list[object] + :type preserve: list[any] :param validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). - :type validate_data_consistency: object + :type validate_data_consistency: any :param skip_error_file: Specify the fault tolerance for data consistency. :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile """ @@ -10024,7 +10309,7 @@ def __init__( name: str, source: "CopySource", sink: "CopySink", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -10032,18 +10317,18 @@ def __init__( policy: Optional["ActivityPolicy"] = None, inputs: Optional[List["DatasetReference"]] = None, outputs: Optional[List["DatasetReference"]] = None, - translator: Optional[object] = None, - enable_staging: Optional[object] = None, + translator: Optional[Any] = None, + enable_staging: Optional[Any] = None, staging_settings: Optional["StagingSettings"] = None, - parallel_copies: Optional[object] = None, - data_integration_units: Optional[object] = None, - enable_skip_incompatible_row: Optional[object] = None, + parallel_copies: Optional[Any] = None, + data_integration_units: Optional[Any] = None, + enable_skip_incompatible_row: Optional[Any] = None, redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, log_storage_settings: Optional["LogStorageSettings"] = None, log_settings: Optional["LogSettings"] = None, - preserve_rules: Optional[List[object]] = None, - preserve: Optional[List[object]] = None, - validate_data_consistency: Optional[object] = None, + preserve_rules: Optional[List[Any]] = None, + preserve: Optional[List[Any]] = None, + validate_data_consistency: Optional[Any] = None, skip_error_file: Optional["SkipErrorFile"] = None, **kwargs ): @@ -10073,10 +10358,10 @@ class CopyActivityLogSettings(msrest.serialization.Model): :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :type log_level: object + :type log_level: any :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). - :type enable_reliable_logging: object + :type enable_reliable_logging: any """ _attribute_map = { @@ -10087,8 +10372,8 @@ class CopyActivityLogSettings(msrest.serialization.Model): def __init__( self, *, - log_level: Optional[object] = None, - enable_reliable_logging: Optional[object] = None, + log_level: Optional[Any] = None, + enable_reliable_logging: Optional[Any] = None, **kwargs ): super(CopyActivityLogSettings, self).__init__(**kwargs) @@ -10106,7 +10391,7 @@ class CopyTranslator(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy translator type.Constant filled by server. 
:type type: str """ @@ -10127,7 +10412,7 @@ class CopyTranslator(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(CopyTranslator, self).__init__(**kwargs) @@ -10142,7 +10427,7 @@ class CosmosDbLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -10152,21 +10437,21 @@ class CosmosDbLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string). - :type account_endpoint: object + :type account_endpoint: any :param database: The name of the database. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. :type account_key: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). Possible values include: @@ -10181,18 +10466,18 @@ class CosmosDbLinkedService(LinkedService): :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param connection_mode: The connection mode used to access CosmosDB account. Type: string (or Expression with resultType string). Possible values include: "Gateway", "Direct". :type connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -10222,22 +10507,22 @@ class CosmosDbLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - account_endpoint: Optional[object] = None, - database: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, + account_endpoint: Optional[Any] = None, + database: Optional[Any] = None, account_key: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, + service_principal_id: Optional[Any] = None, service_principal_credential_type: Optional[Union[str, "CosmosDbServicePrincipalCredentialType"]] = None, service_principal_credential: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - azure_cloud_type: Optional[object] = None, + tenant: Optional[Any] = None, + azure_cloud_type: Optional[Any] = None, connection_mode: Optional[Union[str, "CosmosDbConnectionMode"]] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -10262,29 +10547,29 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). 
- :type collection: object + :type collection: any """ _validation = { @@ -10310,13 +10595,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection: object, - additional_properties: Optional[Dict[str, object]] = None, + collection: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -10332,7 +10617,7 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -10342,14 +10627,14 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). - :type database: object + :type database: any """ _validation = { @@ -10372,13 +10657,13 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - database: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + database: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -10394,28 +10679,31 @@ class CosmosDbMongoDbApiSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :type write_behavior: object + :type write_behavior: any """ _validation = { @@ -10430,22 +10718,24 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - write_behavior: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + write_behavior: Optional[Any] = None, **kwargs ): - super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = write_behavior @@ -10457,32 +10747,35 @@ class CosmosDbMongoDbApiSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: object + :type filter: any :param cursor_methods: Cursor methods for Mongodb query. :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :type batch_size: object + :type batch_size: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -10498,6 +10791,7 @@ class CosmosDbMongoDbApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -10508,18 +10802,19 @@ class CosmosDbMongoDbApiSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - filter: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + filter: Optional[Any] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, - batch_size: Optional[object] = None, - query_timeout: Optional[object] = None, + batch_size: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -10535,29 +10830,29 @@ class CosmosDbSqlApiCollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). - :type collection_name: object + :type collection_name: any """ _validation = { @@ -10583,13 +10878,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection_name: object, - additional_properties: Optional[Dict[str, object]] = None, + collection_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -10605,27 +10900,30 @@ class CosmosDbSqlApiSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. 
- :type write_behavior: object + :type write_behavior: any """ _validation = { @@ -10640,22 +10938,24 @@ class CosmosDbSqlApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - write_behavior: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + write_behavior: Optional[Any] = None, **kwargs ): - super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = write_behavior @@ -10667,29 +10967,32 @@ class CosmosDbSqlApiSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: SQL API query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param page_size: Page size of the result. Type: integer (or Expression with resultType integer). - :type page_size: object + :type page_size: any :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). 
- :type preferred_regions: object + :type preferred_regions: any :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). - :type detect_datetime: object + :type detect_datetime: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -10705,6 +11008,7 @@ class CosmosDbSqlApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, @@ -10715,18 +11019,19 @@ class CosmosDbSqlApiSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, - page_size: Optional[object] = None, - preferred_regions: Optional[object] = None, - detect_datetime: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, + page_size: Optional[Any] = None, + preferred_regions: Optional[Any] = None, + detect_datetime: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = query self.page_size = page_size @@ -10742,7 +11047,7 @@ class CouchbaseLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -10752,16 +11057,16 @@ class CouchbaseLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
- :type connection_string: object + :type connection_string: any :param cred_string: The Azure key vault secret reference of credString in connection string. :type cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -10783,14 +11088,14 @@ class CouchbaseLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, cred_string: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -10807,27 +11112,30 @@ class CouchbaseSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
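As the docstring above notes, the Couchbase `credString` can be sourced from Key Vault. A minimal sketch, assuming the generated models are exported from azure.mgmt.datafactory.models as usual; the server and reference names are hypothetical:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    CouchbaseLinkedService,
    LinkedServiceReference,
)

couchbase = CouchbaseLinkedService(
    connection_string="Server=cb.example.com;Port=8091",   # hypothetical ODBC string
    cred_string=AzureKeyVaultSecretReference(              # credString resolved from Key Vault
        store=LinkedServiceReference(reference_name="MyKeyVault"),
        secret_name="couchbase-cred",
    ),
)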
- :type query: object + :type query: any """ _validation = { @@ -10840,6 +11148,7 @@ class CouchbaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -10848,16 +11157,17 @@ class CouchbaseSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CouchbaseSource' # type: str self.query = query @@ -10869,28 +11179,28 @@ class CouchbaseTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. 
Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -10915,14 +11225,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -11067,7 +11377,7 @@ class CustomActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -11084,24 +11394,24 @@ class CustomActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param command: Required. Command for custom activity Type: string (or Expression with resultType string). - :type command: object + :type command: any :param resource_linked_service: Resource linked service reference. :type resource_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param folder_path: Folder path for resource files Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param reference_objects: Reference objects. :type reference_objects: ~azure.mgmt.datafactory.models.CustomActivityReferenceObject :param extended_properties: User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. - :type extended_properties: dict[str, object] + :type extended_properties: dict[str, any] :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). - :type retention_time_in_days: object + :type retention_time_in_days: any :param auto_user_specification: Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). 
- :type auto_user_specification: object + :type auto_user_specification: any """ _validation = { @@ -11132,19 +11442,19 @@ def __init__( self, *, name: str, - command: object, - additional_properties: Optional[Dict[str, object]] = None, + command: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, resource_linked_service: Optional["LinkedServiceReference"] = None, - folder_path: Optional[object] = None, + folder_path: Optional[Any] = None, reference_objects: Optional["CustomActivityReferenceObject"] = None, - extended_properties: Optional[Dict[str, object]] = None, - retention_time_in_days: Optional[object] = None, - auto_user_specification: Optional[object] = None, + extended_properties: Optional[Dict[str, Any]] = None, + retention_time_in_days: Optional[Any] = None, + auto_user_specification: Optional[Any] = None, **kwargs ): super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -11191,28 +11501,28 @@ class CustomDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type_properties: Custom dataset properties. 
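Per the constructor in the hunk below, `CustomActivity` requires only `name` and `command`; everything else, including the free-form `extended_properties` bag, is optional. A minimal sketch with hypothetical names and values:

from azure.mgmt.datafactory.models import CustomActivity, LinkedServiceReference

activity = CustomActivity(
    name="RunCustomExe",
    command="cmd /c run.exe",                                   # required; Any-typed
    linked_service_name=LinkedServiceReference(reference_name="AzureBatchCompute"),
    extended_properties={"environment": "test"},                # unrestricted property bag
    retention_time_in_days=30,                                  # Any-typed double
)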
- :type type_properties: object + :type type_properties: any """ _validation = { @@ -11237,14 +11547,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - type_properties: Optional[object] = None, + type_properties: Optional[Any] = None, **kwargs ): super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -11259,7 +11569,7 @@ class CustomDataSourceLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -11269,9 +11579,9 @@ class CustomDataSourceLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param type_properties: Required. Custom linked service properties. - :type type_properties: object + :type type_properties: any """ _validation = { @@ -11292,12 +11602,12 @@ class CustomDataSourceLinkedService(LinkedService): def __init__( self, *, - type_properties: object, - additional_properties: Optional[Dict[str, object]] = None, + type_properties: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -11314,7 +11624,7 @@ class CustomEventsTrigger(MultiplePipelineTrigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -11323,7 +11633,7 @@ class CustomEventsTrigger(MultiplePipelineTrigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. 
:type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param subject_begins_with: The event subject must begin with the pattern provided for trigger @@ -11333,7 +11643,7 @@ class CustomEventsTrigger(MultiplePipelineTrigger): fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. :type subject_ends_with: str :param events: Required. The list of event types that cause this trigger to fire. - :type events: list[object] + :type events: list[any] :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. :type scope: str """ @@ -11361,11 +11671,11 @@ class CustomEventsTrigger(MultiplePipelineTrigger): def __init__( self, *, - events: List[object], + events: List[Any], scope: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, pipelines: Optional[List["TriggerPipelineReference"]] = None, subject_begins_with: Optional[str] = None, subject_ends_with: Optional[str] = None, @@ -11386,7 +11696,7 @@ class DatabricksNotebookActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -11404,12 +11714,12 @@ class DatabricksNotebookActivity(ExecutionActivity): :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). - :type notebook_path: object + :type notebook_path: any :param base_parameters: Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. - :type base_parameters: dict[str, object] + :type base_parameters: dict[str, any] :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :type libraries: list[dict[str, any]] """ _validation = { @@ -11436,15 +11746,15 @@ def __init__( self, *, name: str, - notebook_path: object, - additional_properties: Optional[Dict[str, object]] = None, + notebook_path: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - base_parameters: Optional[Dict[str, object]] = None, - libraries: Optional[List[Dict[str, object]]] = None, + base_parameters: Optional[Dict[str, Any]] = None, + libraries: Optional[List[Dict[str, Any]]] = None, **kwargs ): super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -11461,7 +11771,7 @@ class DatabricksSparkJarActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
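The `CustomEventsTrigger` in this hunk requires `events` and `scope`; subject filters and pipeline references are optional. A minimal sketch under those signatures (subscription ID, topic, and pipeline name are placeholders):

from azure.mgmt.datafactory.models import (
    CustomEventsTrigger,
    PipelineReference,
    TriggerPipelineReference,
)

trigger = CustomEventsTrigger(
    events=["myapp.ingest.completed"],   # required; event types are free-form (list[any])
    scope=(
        "/subscriptions/00000000-0000-0000-0000-000000000000"
        "/resourceGroups/rg/providers/Microsoft.EventGrid/topics/my-topic"
    ),
    subject_begins_with="factories",
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name="IngestPipeline"),
    )],
)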
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -11479,11 +11789,11 @@ class DatabricksSparkJarActivity(ExecutionActivity): :param main_class_name: Required. The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). - :type main_class_name: object + :type main_class_name: any :param parameters: Parameters that will be passed to the main method. - :type parameters: list[object] + :type parameters: list[any] :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :type libraries: list[dict[str, any]] """ _validation = { @@ -11510,15 +11820,15 @@ def __init__( self, *, name: str, - main_class_name: object, - additional_properties: Optional[Dict[str, object]] = None, + main_class_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - parameters: Optional[List[object]] = None, - libraries: Optional[List[Dict[str, object]]] = None, + parameters: Optional[List[Any]] = None, + libraries: Optional[List[Dict[str, Any]]] = None, **kwargs ): super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -11535,7 +11845,7 @@ class DatabricksSparkPythonActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -11552,11 +11862,11 @@ class DatabricksSparkPythonActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param python_file: Required. The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). - :type python_file: object + :type python_file: any :param parameters: Command line parameters that will be passed to the Python file. - :type parameters: list[object] + :type parameters: list[any] :param libraries: A list of libraries to be installed on the cluster that will execute the job. 
- :type libraries: list[dict[str, object]] + :type libraries: list[dict[str, any]] """ _validation = { @@ -11583,15 +11893,15 @@ def __init__( self, *, name: str, - python_file: object, - additional_properties: Optional[Dict[str, object]] = None, + python_file: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - parameters: Optional[List[object]] = None, - libraries: Optional[List[Dict[str, object]]] = None, + parameters: Optional[List[Any]] = None, + libraries: Optional[List[Dict[str, Any]]] = None, **kwargs ): super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -11607,17 +11917,23 @@ class DataFlow(msrest.serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: MappingDataFlow. - :param type: Type of data flow.Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. :type type: str :param description: The description of the data flow. :type description: str :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, @@ -11633,7 +11949,7 @@ def __init__( self, *, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DataFlowFolder"] = None, **kwargs ): @@ -11749,7 +12065,7 @@ class DataFlowDebugPackage(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param session_id: The ID of data flow debug session. :type session_id: str :param data_flow: Data flow instance. @@ -11777,7 +12093,7 @@ class DataFlowDebugPackage(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, session_id: Optional[str] = None, data_flow: Optional["DataFlowDebugResource"] = None, datasets: Optional[List["DatasetDebugResource"]] = None, @@ -11802,9 +12118,9 @@ class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): :param source_settings: Source setting for data flow debug. :type source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] :param parameters: Data flow parameters. - :type parameters: dict[str, object] + :type parameters: dict[str, any] :param dataset_parameters: Parameters for dataset. 
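The Databricks activities in these hunks share the same shape: one required entry point plus optional `parameters` and free-form `libraries` dicts. A minimal sketch for the Python variant (file path, workspace name, and library spec are illustrative):

from azure.mgmt.datafactory.models import DatabricksSparkPythonActivity, LinkedServiceReference

spark_job = DatabricksSparkPythonActivity(
    name="ScorePython",
    python_file="dbfs:/jobs/score.py",          # required; DBFS paths are supported
    linked_service_name=LinkedServiceReference(reference_name="DatabricksWorkspace"),
    parameters=["--date", "2021-07-08"],
    libraries=[{"pypi": {"package": "simplejson"}}],   # free-form dicts per the list[dict[str, any]] type
)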
- :type dataset_parameters: object + :type dataset_parameters: any """ _attribute_map = { @@ -11817,8 +12133,8 @@ def __init__( self, *, source_settings: Optional[List["DataFlowSourceSetting"]] = None, - parameters: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, + parameters: Optional[Dict[str, Any]] = None, + dataset_parameters: Optional[Any] = None, **kwargs ): super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) @@ -11884,7 +12200,7 @@ class DataFlowDebugSessionInfo(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param data_flow_name: The name of the data flow. :type data_flow_name: str :param compute_type: Compute type of the cluster. @@ -11921,7 +12237,7 @@ class DataFlowDebugSessionInfo(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, data_flow_name: Optional[str] = None, compute_type: Optional[str] = None, core_count: Optional[int] = None, @@ -12008,13 +12324,13 @@ class DataFlowReference(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] - :ivar type: Required. Data flow reference type. Default value: "DataFlowReference". + :type additional_properties: dict[str, any] + :ivar type: Data flow reference type. Has constant value: "DataFlowReference". :vartype type: str :param reference_name: Required. Reference data flow name. :type reference_name: str :param dataset_parameters: Reference data flow parameters from dataset. - :type dataset_parameters: object + :type dataset_parameters: any """ _validation = { @@ -12035,8 +12351,8 @@ def __init__( self, *, reference_name: str, - additional_properties: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + dataset_parameters: Optional[Any] = None, **kwargs ): super(DataFlowReference, self).__init__(**kwargs) @@ -12257,7 +12573,7 @@ class DataFlowSourceSetting(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param source_name: The data flow source name. :type source_name: str :param row_limit: Defines the row limit of data flow source in debug. @@ -12273,7 +12589,7 @@ class DataFlowSourceSetting(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, source_name: Optional[str] = None, row_limit: Optional[int] = None, **kwargs @@ -12291,7 +12607,7 @@ class DataFlowStagingInfo(msrest.serialization.Model): :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). 
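A minimal sketch of the debug settings assembled from the constructors in the surrounding hunks; the `dataset_parameters` shape is dataset-specific and Any-typed, so the dict below is only a plausible example:

from azure.mgmt.datafactory.models import (
    DataFlowDebugPackageDebugSettings,
    DataFlowSourceSetting,
)

debug_settings = DataFlowDebugPackageDebugSettings(
    source_settings=[DataFlowSourceSetting(source_name="source1", row_limit=100)],
    parameters={"window": 24},                    # data flow parameters (dict[str, any])
    dataset_parameters={"path": "samples/2021"},  # Any-typed; shape depends on the dataset
)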
- :type folder_path: object + :type folder_path: any """ _attribute_map = { @@ -12303,7 +12619,7 @@ def __init__( self, *, linked_service: Optional["LinkedServiceReference"] = None, - folder_path: Optional[object] = None, + folder_path: Optional[Any] = None, **kwargs ): super(DataFlowStagingInfo, self).__init__(**kwargs) @@ -12318,7 +12634,7 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -12335,24 +12651,24 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). - :type script_path: object + :type script_path: any :param script_linked_service: Required. Script linked service reference. :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :type degree_of_parallelism: object + :type degree_of_parallelism: any :param priority: Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. - :type priority: object + :type priority: any :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] + :type parameters: dict[str, any] :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). - :type runtime_version: object + :type runtime_version: any :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or Expression with resultType string). 
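Per the docstring above, `DataLakeAnalyticsUSQLActivity` requires `name`, `script_path`, and `script_linked_service`; the tuning knobs are Any-typed. A minimal sketch with hypothetical names:

from azure.mgmt.datafactory.models import DataLakeAnalyticsUSQLActivity, LinkedServiceReference

usql = DataLakeAnalyticsUSQLActivity(
    name="TransformLogs",
    script_path="scripts/transform.usql",        # required; case-sensitive folder path
    script_linked_service=LinkedServiceReference(reference_name="AdlsScriptStore"),
    degree_of_parallelism=3,      # minimum 1 per the docstring
    priority=100,                 # lower number = higher priority
    compilation_mode="Semantic",  # one of Semantic, Full, SingleBox
)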
- :type compilation_mode: object + :type compilation_mode: any """ _validation = { @@ -12384,19 +12700,19 @@ def __init__( self, *, name: str, - script_path: object, + script_path: Any, script_linked_service: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - degree_of_parallelism: Optional[object] = None, - priority: Optional[object] = None, - parameters: Optional[Dict[str, object]] = None, - runtime_version: Optional[object] = None, - compilation_mode: Optional[object] = None, + degree_of_parallelism: Optional[Any] = None, + priority: Optional[Any] = None, + parameters: Optional[Dict[str, Any]] = None, + runtime_version: Optional[Any] = None, + compilation_mode: Optional[Any] = None, **kwargs ): super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -12420,7 +12736,7 @@ class DatasetCompression(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str """ @@ -12441,7 +12757,7 @@ class DatasetCompression(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(DatasetCompression, self).__init__(**kwargs) @@ -12456,7 +12772,7 @@ class DatasetBZip2Compression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str """ @@ -12473,7 +12789,7 @@ class DatasetBZip2Compression(DatasetCompression): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12484,9 +12800,9 @@ class DatasetDataElement(msrest.serialization.Model): """Columns that define the structure of the dataset. :param name: Name of the column. Type: string (or Expression with resultType string). - :type name: object + :type name: any :param type: Type of the column. Type: string (or Expression with resultType string). - :type type: object + :type type: any """ _attribute_map = { @@ -12497,8 +12813,8 @@ class DatasetDataElement(msrest.serialization.Model): def __init__( self, *, - name: Optional[object] = None, - type: Optional[object] = None, + name: Optional[Any] = None, + type: Optional[Any] = None, **kwargs ): super(DatasetDataElement, self).__init__(**kwargs) @@ -12544,11 +12860,11 @@ class DatasetDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: any """ _validation = { @@ -12558,14 +12874,14 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + additional_properties: Optional[Dict[str, Any]] = None, + level: Optional[Any] = None, **kwargs ): super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12601,11 +12917,11 @@ class DatasetGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. + :type level: any """ _validation = { @@ -12615,14 +12931,14 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + additional_properties: Optional[Dict[str, Any]] = None, + level: Optional[Any] = None, **kwargs ): super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12669,12 +12985,12 @@ class DatasetReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Dataset reference type. Default value: "DatasetReference". + :ivar type: Dataset reference type. Has constant value: "DatasetReference". :vartype type: str :param reference_name: Required. Reference dataset name. :type reference_name: str :param parameters: Arguments for dataset. - :type parameters: dict[str, object] + :type parameters: dict[str, any] """ _validation = { @@ -12694,7 +13010,7 @@ def __init__( self, *, reference_name: str, - parameters: Optional[Dict[str, object]] = None, + parameters: Optional[Dict[str, Any]] = None, **kwargs ): super(DatasetReference, self).__init__(**kwargs) @@ -12752,11 +13068,11 @@ class DatasetSchemaDataElement(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Name of the schema column. Type: string (or Expression with resultType string). 
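These hunks relax the compression `level` from the `DatasetCompressionLevel` enum to an Any-typed value serialized as `object`, so the old literals still work and an ADF expression now does too. A minimal sketch (the pipeline parameter name is hypothetical):

from azure.mgmt.datafactory.models import DatasetGZipCompression

# Plain literal, matching the old enum values:
gzip_literal = DatasetGZipCompression(level="Optimal")

# Newly possible, since level is Any-typed:
gzip_expr = DatasetGZipCompression(
    level={"value": "@pipeline().parameters.gzipLevel", "type": "Expression"},
)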
- :type name: object + :type name: any :param type: Type of the schema column. Type: string (or Expression with resultType string). - :type type: object + :type type: any """ _attribute_map = { @@ -12768,9 +13084,9 @@ class DatasetSchemaDataElement(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - name: Optional[object] = None, - type: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + name: Optional[Any] = None, + type: Optional[Any] = None, **kwargs ): super(DatasetSchemaDataElement, self).__init__(**kwargs) @@ -12786,7 +13102,7 @@ class DatasetTarCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str """ @@ -12803,7 +13119,7 @@ class DatasetTarCompression(DatasetCompression): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(DatasetTarCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12817,11 +13133,11 @@ class DatasetTarGZipCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The TarGZip compression level. + :type level: any """ _validation = { @@ -12831,14 +13147,14 @@ class DatasetTarGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + additional_properties: Optional[Dict[str, Any]] = None, + level: Optional[Any] = None, **kwargs ): super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12853,11 +13169,11 @@ class DatasetZipDeflateCompression(DatasetCompression): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. 
+ :type level: any """ _validation = { @@ -12867,14 +13183,14 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + additional_properties: Optional[Dict[str, Any]] = None, + level: Optional[Any] = None, **kwargs ): super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12889,7 +13205,7 @@ class Db2LinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -12899,36 +13215,36 @@ class Db2LinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: The connection string. It is mutually exclusive with server, database, authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param server: Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param database: Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param authentication_type: AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. Possible values include: "Basic". :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType :param username: Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param package_collection: Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type package_collection: object + :type package_collection: any :param certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). - :type certificate_common_name: object + :type certificate_common_name: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -12956,20 +13272,20 @@ class Db2LinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - server: Optional[object] = None, - database: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, + server: Optional[Any] = None, + database: Optional[Any] = None, authentication_type: Optional[Union[str, "Db2AuthenticationType"]] = None, - username: Optional[object] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - package_collection: Optional[object] = None, - certificate_common_name: Optional[object] = None, - encrypted_credential: Optional[object] = None, + package_collection: Optional[Any] = None, + certificate_common_name: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -12992,26 +13308,29 @@ class Db2Source(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
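As the docstring stresses, the discrete Db2 connection properties are mutually exclusive with `connection_string`. A minimal sketch using the discrete properties (host, database, and credentials are placeholders):

from azure.mgmt.datafactory.models import Db2LinkedService, SecureString

db2 = Db2LinkedService(
    server="db2.example.com",       # mutually exclusive with connection_string
    database="SAMPLE",
    authentication_type="Basic",
    username="db2admin",
    password=SecureString(value="<placeholder>"),
    certificate_common_name="db2.example.com",  # only relevant when TLS is enabled
)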
- :type query: object + :type query: any """ _validation = { @@ -13024,6 +13343,7 @@ class Db2Source(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -13032,16 +13352,17 @@ class Db2Source(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'Db2Source' # type: str self.query = query @@ -13053,34 +13374,34 @@ class Db2TableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. 
Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The Db2 table name. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -13107,16 +13428,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + table_name: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -13133,7 +13454,7 @@ class DeleteActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -13150,13 +13471,13 @@ class DeleteActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param recursive: If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param max_concurrent_connections: The max concurrent connections to connect data source at the same time. :type max_concurrent_connections: int :param enable_logging: Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_logging: object + :type enable_logging: any :param log_storage_settings: Log storage settings customer need to provide when enableLogging is true. 
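The `DeleteActivity` below requires `name` and a `DatasetReference`; note that `max_concurrent_connections` stays a plain int here, unlike the Any-typed fields around it. A minimal sketch with hypothetical names:

from azure.mgmt.datafactory.models import DatasetReference, DeleteActivity

cleanup = DeleteActivity(
    name="CleanupStaging",
    dataset=DatasetReference(reference_name="StagingFolder"),  # required
    recursive=True,                 # Any-typed: bool literal or expression
    max_concurrent_connections=4,   # plain int in this model
    enable_logging=False,
)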
:type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings @@ -13195,15 +13516,15 @@ def __init__( *, name: str, dataset: "DatasetReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - recursive: Optional[object] = None, + recursive: Optional[Any] = None, max_concurrent_connections: Optional[int] = None, - enable_logging: Optional[object] = None, + enable_logging: Optional[Any] = None, log_storage_settings: Optional["LogStorageSettings"] = None, store_settings: Optional["StoreReadSettings"] = None, **kwargs @@ -13246,23 +13567,23 @@ class DelimitedTextDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -13270,31 +13591,30 @@ class DelimitedTextDataset(Dataset): :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :type column_delimiter: object + :type column_delimiter: any :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: object + :type row_delimiter: any :param encoding_name: The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: object - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~azure.mgmt.datafactory.models.CompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". 
- :type compression_level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :type encoding_name: any + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: any + :param compression_level: The data compression method used for DelimitedText. + :type compression_level: any :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: object + :type quote_char: any :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: object + :type escape_char: any :param first_row_as_header: When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object + :type first_row_as_header: any :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :type null_value: any """ _validation = { @@ -13316,8 +13636,8 @@ class DelimitedTextDataset(Dataset): 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -13328,23 +13648,23 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - column_delimiter: Optional[object] = None, - row_delimiter: Optional[object] = None, - encoding_name: Optional[object] = None, - compression_codec: Optional[Union[str, "CompressionCodec"]] = None, - compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None, - quote_char: Optional[object] = None, - escape_char: Optional[object] = None, - first_row_as_header: Optional[object] = None, - null_value: Optional[object] = None, + column_delimiter: Optional[Any] = None, + row_delimiter: Optional[Any] = None, + encoding_name: Optional[Any] = None, + compression_codec: Optional[Any] = None, + compression_level: Optional[Any] = None, + quote_char: Optional[Any] = None, + escape_char: Optional[Any] = None, + first_row_as_header: Optional[Any] = None, + null_value: Optional[Any] = None, **kwargs ): super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, 
description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -13368,12 +13688,12 @@ class DelimitedTextReadSettings(FormatReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). - :type skip_line_count: object + :type skip_line_count: any :param compression_properties: Compression settings. :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ @@ -13392,8 +13712,8 @@ class DelimitedTextReadSettings(FormatReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - skip_line_count: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + skip_line_count: Optional[Any] = None, compression_properties: Optional["CompressionReadSettings"] = None, **kwargs ): @@ -13410,24 +13730,27 @@ class DelimitedTextSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: DelimitedText store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: DelimitedText format settings. 
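# --- Illustrative aside, not part of the generated patch ---
# A minimal sketch of what the compressionCodec/compressionLevel change shown
# above enables: both DelimitedTextDataset properties were enum-typed
# (CompressionCodec, DatasetCompressionLevel) and are now plain Any, so a
# caller may pass either a literal string or an ADF expression object. The
# linked service name below is hypothetical.
from azure.mgmt.datafactory.models import DelimitedTextDataset, LinkedServiceReference

dataset = DelimitedTextDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="ExampleBlobStorage",  # hypothetical linked service
    ),
    column_delimiter=",",
    first_row_as_header=True,
    compression_codec={"type": "Expression", "value": "@dataset().codec"},  # expressions now allowed
    compression_level="Optimal",  # plain strings still work
)
# --- End aside ---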
@@ -13446,6 +13769,7 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } @@ -13453,17 +13777,18 @@ class DelimitedTextSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["DelimitedTextWriteSettings"] = None, **kwargs ): - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DelimitedTextSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -13476,18 +13801,21 @@ class DelimitedTextSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: DelimitedText store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: DelimitedText format settings. 
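# --- Illustrative aside, not part of the generated patch ---
# A sketch of the new disableMetricsCollection knob that this patch threads
# through every copy source and sink. Like the neighbouring tunables it is
# typed as Any, so it accepts a boolean literal or an ADF expression; the
# service-side default is false.
from azure.mgmt.datafactory.models import DelimitedTextReadSettings, DelimitedTextSource

source = DelimitedTextSource(
    format_settings=DelimitedTextReadSettings(skip_line_count=1),
    max_concurrent_connections=4,
    disable_metrics_collection=True,  # new in this version of the models
)
# --- End aside ---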
@@ -13507,6 +13835,7 @@ class DelimitedTextSource(CopySource):
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
         'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'},
         'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -13515,16 +13844,17 @@ def __init__(
         self,
         *,
-        additional_properties: Optional[Dict[str, object]] = None,
-        source_retry_count: Optional[object] = None,
-        source_retry_wait: Optional[object] = None,
-        max_concurrent_connections: Optional[object] = None,
+        additional_properties: Optional[Dict[str, Any]] = None,
+        source_retry_count: Optional[Any] = None,
+        source_retry_wait: Optional[Any] = None,
+        max_concurrent_connections: Optional[Any] = None,
+        disable_metrics_collection: Optional[Any] = None,
         store_settings: Optional["StoreReadSettings"] = None,
         format_settings: Optional["DelimitedTextReadSettings"] = None,
         additional_columns: Optional[List["AdditionalColumns"]] = None,
         **kwargs
     ):
-        super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
         self.type = 'DelimitedTextSource' # type: str
         self.store_settings = store_settings
         self.format_settings = format_settings
@@ -13538,22 +13868,22 @@ class DelimitedTextWriteSettings(FormatWriteSettings):
     :param additional_properties: Unmatched properties from the message are deserialized to this
      collection.
-    :type additional_properties: dict[str, object]
+    :type additional_properties: dict[str, any]
     :param type: Required. The write setting type.Constant filled by server.
     :type type: str
     :param quote_all_text: Indicates whether string values should always be enclosed with quotes.
     Type: boolean (or Expression with resultType boolean).
-    :type quote_all_text: object
+    :type quote_all_text: any
     :param file_extension: Required. The file extension used to create the files. Type: string (or
     Expression with resultType string).
-    :type file_extension: object
+    :type file_extension: any
    :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to
     the specified count. Type: integer (or Expression with resultType integer).
-    :type max_rows_per_file: object
+    :type max_rows_per_file: any
    :param file_name_prefix: Specifies the file name pattern
     :code:`<fileNamePrefix>`_:code:`<fileIndex>`.:code:`<fileExtension>` when copy from non-file
     based store without partitionOptions. Type: string (or Expression with resultType string).
- :type file_name_prefix: object + :type file_name_prefix: any """ _validation = { @@ -13573,11 +13903,11 @@ class DelimitedTextWriteSettings(FormatWriteSettings): def __init__( self, *, - file_extension: object, - additional_properties: Optional[Dict[str, object]] = None, - quote_all_text: Optional[object] = None, - max_rows_per_file: Optional[object] = None, - file_name_prefix: Optional[object] = None, + file_extension: Any, + additional_properties: Optional[Dict[str, Any]] = None, + quote_all_text: Optional[Any] = None, + max_rows_per_file: Optional[Any] = None, + file_name_prefix: Optional[Any] = None, **kwargs ): super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -13627,14 +13957,14 @@ class DistcpSettings(msrest.serialization.Model): :param resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). - :type resource_manager_endpoint: object + :type resource_manager_endpoint: any :param temp_script_path: Required. Specifies an existing folder path which will be used to store temp Distcp command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType string). - :type temp_script_path: object + :type temp_script_path: any :param distcp_options: Specifies the Distcp options. Type: string (or Expression with resultType string). - :type distcp_options: object + :type distcp_options: any """ _validation = { @@ -13651,9 +13981,9 @@ class DistcpSettings(msrest.serialization.Model): def __init__( self, *, - resource_manager_endpoint: object, - temp_script_path: object, - distcp_options: Optional[object] = None, + resource_manager_endpoint: Any, + temp_script_path: Any, + distcp_options: Optional[Any] = None, **kwargs ): super(DistcpSettings, self).__init__(**kwargs) @@ -13669,29 +13999,29 @@ class DocumentDbCollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. Document Database collection name. Type: string (or Expression with resultType string). 
- :type collection_name: object + :type collection_name: any """ _validation = { @@ -13717,13 +14047,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection_name: object, - additional_properties: Optional[Dict[str, object]] = None, + collection_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -13739,30 +14069,33 @@ class DocumentDbCollectionSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). - :type nesting_separator: object + :type nesting_separator: any :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. 
- :type write_behavior: object + :type write_behavior: any """ _validation = { @@ -13777,6 +14110,7 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -13784,17 +14118,18 @@ class DocumentDbCollectionSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - nesting_separator: Optional[object] = None, - write_behavior: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + nesting_separator: Optional[Any] = None, + write_behavior: Optional[Any] = None, **kwargs ): - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = nesting_separator self.write_behavior = write_behavior @@ -13807,26 +14142,29 @@ class DocumentDbCollectionSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Documents query. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any :param nesting_separator: Nested properties separator. Type: string (or Expression with resultType string). - :type nesting_separator: object + :type nesting_separator: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -13842,6 +14180,7 @@ class DocumentDbCollectionSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, @@ -13851,17 +14190,18 @@ class DocumentDbCollectionSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, - nesting_separator: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, + nesting_separator: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = query self.nesting_separator = nesting_separator @@ -13876,7 +14216,7 @@ class DrillLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -13886,16 +14226,16 @@ class DrillLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. 
Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -13917,14 +14257,14 @@ class DrillLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -13941,27 +14281,30 @@ class DrillSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -13974,6 +14317,7 @@ class DrillSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -13982,16 +14326,17 @@ class DrillSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DrillSource' # type: str self.query = query @@ -14003,34 +14348,34 @@ class DrillTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. 
Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Drill. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -14057,16 +14402,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -14080,10 +14425,10 @@ class DWCopyCommandDefaultValue(msrest.serialization.Model): """Default value. :param column_name: Column name. Type: object (or Expression with resultType string). - :type column_name: object + :type column_name: any :param default_value: The default value of the column. Type: object (or Expression with resultType string). - :type default_value: object + :type default_value: any """ _attribute_map = { @@ -14094,8 +14439,8 @@ class DWCopyCommandDefaultValue(msrest.serialization.Model): def __init__( self, *, - column_name: Optional[object] = None, - default_value: Optional[object] = None, + column_name: Optional[Any] = None, + default_value: Optional[Any] = None, **kwargs ): super(DWCopyCommandDefaultValue, self).__init__(**kwargs) @@ -14141,7 +14486,7 @@ class DynamicsAXLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -14151,13 +14496,13 @@ class DynamicsAXLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. - :type url: object + :type url: any :param service_principal_id: Required. Specify the application's client ID. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: Required. Specify the application's key. 
Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). @@ -14165,14 +14510,14 @@ class DynamicsAXLinkedService(LinkedService): :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). - :type aad_resource_id: object + :type aad_resource_id: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -14202,17 +14547,17 @@ class DynamicsAXLinkedService(LinkedService): def __init__( self, *, - url: object, - service_principal_id: object, + url: Any, + service_principal_id: Any, service_principal_key: "SecretBase", - tenant: object, - aad_resource_id: object, - additional_properties: Optional[Dict[str, object]] = None, + tenant: Any, + aad_resource_id: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - encrypted_credential: Optional[object] = None, + annotations: Optional[List[Any]] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -14232,29 +14577,29 @@ class DynamicsAXResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). 
- :type path: object + :type path: any """ _validation = { @@ -14280,13 +14625,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - path: object, - additional_properties: Optional[Dict[str, object]] = None, + path: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -14302,32 +14647,35 @@ class DynamicsAXSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -14340,6 +14688,7 @@ class DynamicsAXSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -14349,17 +14698,18 @@ class DynamicsAXSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, - http_request_timeout: Optional[object] = None, + query: Optional[Any] = None, + http_request_timeout: Optional[Any] = None, **kwargs ): - super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DynamicsAXSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -14372,29 +14722,29 @@ class DynamicsCrmEntityDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: object + :type entity_name: any """ _validation = { @@ -14419,14 +14769,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - entity_name: Optional[object] = None, + entity_name: Optional[Any] = None, **kwargs ): super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -14441,7 +14791,7 @@ class DynamicsCrmLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -14451,47 +14801,43 @@ class DynamicsCrmLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType + string (or Expression with resultType string). + :type deployment_type: any :param host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: object + :type host_name: any :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object + :type service_uri: any :param organization_name: The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). 
- :type organization_name: object + :type organization_name: any :param authentication_type: Required. The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: any :param username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password to access the Dynamics CRM instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: any :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -14501,7 +14847,7 @@ class DynamicsCrmLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -14517,16 +14863,16 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -14534,23 +14880,23 @@ class DynamicsCrmLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + deployment_type: Any, + authentication_type: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - host_name: Optional[object] = None, - port: Optional[object] = None, - service_uri: Optional[object] = None, - organization_name: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + host_name: Optional[Any] = None, + port: Optional[Any] = None, + service_uri: Optional[Any] = None, + organization_name: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_id: Optional[Any] = None, + service_principal_credential_type: Optional[Any] = None, service_principal_credential: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -14576,34 +14922,37 @@ class DynamicsCrmSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object + :type ignore_null_values: any :param alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). 
- :type alternate_key_name: object + :type alternate_key_name: any """ _validation = { @@ -14619,6 +14968,7 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -14628,17 +14978,18 @@ def __init__( self, *, write_behavior: Union[str, "DynamicsSinkWriteBehavior"], - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - ignore_null_values: Optional[object] = None, - alternate_key_name: Optional[object] = None, - **kwargs - ): - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + ignore_null_values: Optional[Any] = None, + alternate_key_name: Optional[Any] = None, + **kwargs + ): + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -14652,21 +15003,24 @@ class DynamicsCrmSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
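The newly added disableMetricsCollection property threads through every copy source and sink in this regeneration. A short sink sketch using only the keyword arguments visible in the hunks above:

from azure.mgmt.datafactory.models import DynamicsCrmSink

sink = DynamicsCrmSink(
    write_behavior="Upsert",          # the only documented DynamicsSinkWriteBehavior value
    ignore_null_values=True,          # skip nulls (except key fields) on write
    disable_metrics_collection=True,  # new in this regeneration; service default is false
)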
+ :type disable_metrics_collection: any :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -14682,6 +15036,7 @@ class DynamicsCrmSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -14689,15 +15044,16 @@ class DynamicsCrmSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSource' # type: str self.query = query self.additional_columns = additional_columns @@ -14710,29 +15066,29 @@ class DynamicsEntityDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type entity_name: object + :type entity_name: any """ _validation = { @@ -14757,14 +15113,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - entity_name: Optional[object] = None, + entity_name: Optional[Any] = None, **kwargs ): super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -14779,7 +15135,7 @@ class DynamicsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -14789,44 +15145,42 @@ class DynamicsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType + Expression with resultType string). + :type deployment_type: any :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: object + :type host_name: any :param port: The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- - line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object + :type port: any + :param service_uri: The URL to the Microsoft Dynamics server. The property is required for + on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + :type service_uri: any :param organization_name: The organization name of the Dynamics instance. 
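DynamicsEntityDataset adds only entity_name on top of the shared Dataset surface. A minimal sketch; the linked-service reference name is a placeholder, and LinkedServiceReference is assumed to take reference_name as in this SDK vintage:

from azure.mgmt.datafactory.models import DynamicsEntityDataset, LinkedServiceReference

dataset = DynamicsEntityDataset(
    linked_service_name=LinkedServiceReference(reference_name="DynamicsCrmLS"),  # placeholder name
    entity_name="account",  # logical entity name; string or expression
)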
The property is required for on-prem and required for online when there are more than one Dynamics instance associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object + :type organization_name: any :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + resultType string). + :type authentication_type: any :param username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password to access the Dynamics instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: str :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -14836,7 +15190,7 @@ class DynamicsLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
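Note the asymmetry this regeneration leaves behind: service_principal_credential_type is plain str here but Any on DynamicsCrmLinkedService above. A Server-To-Server sketch with placeholder identifiers:

from azure.mgmt.datafactory.models import DynamicsLinkedService, SecureString

ls = DynamicsLinkedService(
    deployment_type="Online",
    authentication_type="AADServicePrincipal",
    service_principal_id="00000000-0000-0000-0000-000000000000",  # placeholder app (client) ID
    service_principal_credential_type="ServicePrincipalKey",      # key/secret flow
    service_principal_credential=SecureString(value="<secret>"),  # placeholder secret
)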
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -14852,12 +15206,12 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, @@ -14869,23 +15223,23 @@ class DynamicsLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + deployment_type: Any, + authentication_type: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - host_name: Optional[object] = None, - port: Optional[object] = None, - service_uri: Optional[object] = None, - organization_name: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + host_name: Optional[Any] = None, + port: Optional[Any] = None, + service_uri: Optional[Any] = None, + organization_name: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_id: Optional[Any] = None, + service_principal_credential_type: Optional[str] = None, service_principal_credential: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -14911,34 +15265,37 @@ class DynamicsSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.DynamicsSinkWriteBehavior :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object + :type ignore_null_values: any :param alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - :type alternate_key_name: object + :type alternate_key_name: any """ _validation = { @@ -14954,6 +15311,7 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -14963,17 +15321,18 @@ def __init__( self, *, write_behavior: Union[str, "DynamicsSinkWriteBehavior"], - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - ignore_null_values: Optional[object] = None, - alternate_key_name: Optional[object] = None, - **kwargs - ): - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + ignore_null_values: Optional[Any] = None, + alternate_key_name: Optional[Any] = None, + **kwargs + ): + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout,
sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -14987,21 +15346,24 @@ class DynamicsSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
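DynamicsSource.query carries FetchXML, as the docstring above notes. A small sketch with a hypothetical query, using only keyword arguments visible in these hunks:

from azure.mgmt.datafactory.models import DynamicsSource

fetch_xml = (
    "<fetch top='10'>"
    "<entity name='account'><attribute name='name' /></entity>"
    "</fetch>"
)
source = DynamicsSource(
    query=fetch_xml,                   # FetchXML, or an expression resolving to it
    disable_metrics_collection=False,  # new flag; false mirrors the service default
)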
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -15017,6 +15379,7 @@ class DynamicsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -15024,15 +15387,16 @@ class DynamicsSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -15045,7 +15409,7 @@ class EloquaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -15055,28 +15419,28 @@ class EloquaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :type endpoint: object + :type endpoint: any :param username: Required. The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice). - :type username: object + :type username: any :param password: The password corresponding to the user name. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
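Per the EloquaLinkedService docstrings above, the endpoint is host-only and username is the sitename/username pair. A sketch with placeholder credentials:

from azure.mgmt.datafactory.models import EloquaLinkedService, SecureString

eloqua = EloquaLinkedService(
    endpoint="eloqua.example.com",            # host only, no scheme
    username="Eloqua/Alice",                  # sitename/username form
    password=SecureString(value="<secret>"),  # placeholder secret
    use_encrypted_endpoints=True,             # default is already true; shown for clarity
)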
- :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -15104,18 +15468,18 @@ class EloquaLinkedService(LinkedService): def __init__( self, *, - endpoint: object, - username: object, - additional_properties: Optional[Dict[str, object]] = None, + endpoint: Any, + username: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, password: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -15136,28 +15500,28 @@ class EloquaObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -15182,14 +15546,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -15204,27 +15568,30 @@ class EloquaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
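query_timeout on EloquaSource (inherited from TabularSource) must match the timespan pattern quoted above. A sketch with a hypothetical query string:

from azure.mgmt.datafactory.models import EloquaSource

src = EloquaSource(
    query="SELECT * FROM Accounts",   # hypothetical query text
    query_timeout="02:00:00",         # hh:mm:ss, matching the documented pattern
    disable_metrics_collection=True,  # new flag in this regeneration
)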
- :type query: object + :type query: any """ _validation = { @@ -15237,6 +15604,7 @@ class EloquaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -15245,16 +15613,17 @@ class EloquaSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'EloquaSource' # type: str self.query = query @@ -15309,7 +15678,7 @@ class EntityReference(msrest.serialization.Model): """The entity reference. :param type: The type of this referenced entity. Possible values include: - "IntegrationRuntimeReference", "LinkedServiceReference". + "IntegrationRuntimeReference", "LinkedServiceReference", "CredentialReference". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType :param reference_name: The name of this referenced entity. :type reference_name: str @@ -15377,42 +15746,45 @@ class ExcelDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. 
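The ExcelDataset hunks that follow add a sheet_index property alongside sheet_name. A construction sketch assuming the workbook sits in blob storage; AzureBlobStorageLocation and all names, containers, and paths are placeholders from the wider SDK, not from this patch:

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation,
    ExcelDataset,
    LinkedServiceReference,
)

excel = ExcelDataset(
    linked_service_name=LinkedServiceReference(reference_name="BlobStorageLS"),
    location=AzureBlobStorageLocation(container="data", folder_path="in", file_name="book.xlsx"),
    sheet_index=0,             # new property; used here instead of sheet_name
    first_row_as_header=True,
)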
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the excel storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + :param sheet_name: The sheet name of the excel file. Type: string (or Expression with resultType string). - :type sheet_name: object + :type sheet_name: any + :param sheet_index: The sheet index of the excel file; the default value is 0. Type: integer (or + Expression with resultType integer). + :type sheet_index: any :param range: The partial data of one sheet. Type: string (or Expression with resultType string). - :type range: object + :type range: any :param first_row_as_header: When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object + :type first_row_as_header: any :param compression: The data compression method used for the excel dataset. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :type null_value: any """ _validation = { @@ -15432,6 +15804,7 @@ class ExcelDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, 'range': {'key': 'typeProperties.range', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -15442,25 +15815,27 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - sheet_name: Optional[object] = None, - range: Optional[object] = None, - first_row_as_header: Optional[object] = None, + sheet_name: Optional[Any] = None, + sheet_index: Optional[Any] = None, + range: Optional[Any] = None, + first_row_as_header: Optional[Any] = None, compression: Optional["DatasetCompression"] = None, - null_value: Optional[object] = None, + null_value: Optional[Any] = None, **kwargs ): super(ExcelDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema,
linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'Excel' # type: str self.location = location self.sheet_name = sheet_name + self.sheet_index = sheet_index self.range = range self.first_row_as_header = first_row_as_header self.compression = compression @@ -15474,18 +15849,21 @@ class ExcelSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Excel store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -15503,6 +15881,7 @@ class ExcelSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -15510,15 +15889,16 @@ class ExcelSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ExcelSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -15531,7 +15911,7 @@ class 
ExecuteDataFlowActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -15556,14 +15936,14 @@ class ExecuteDataFlowActivity(ExecutionActivity): :type compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :param trace_level: Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). - :type trace_level: object + :type trace_level: any :param continue_on_error: Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). - :type continue_on_error: object + :type continue_on_error: any :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). - :type run_concurrently: object + :type run_concurrently: any """ _validation = { @@ -15595,7 +15975,7 @@ def __init__( *, name: str, data_flow: "DataFlowReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -15604,9 +15984,9 @@ def __init__( staging: Optional["DataFlowStagingInfo"] = None, integration_runtime: Optional["IntegrationRuntimeReference"] = None, compute: Optional["ExecuteDataFlowActivityTypePropertiesCompute"] = None, - trace_level: Optional[object] = None, - continue_on_error: Optional[object] = None, - run_concurrently: Optional[object] = None, + trace_level: Optional[Any] = None, + continue_on_error: Optional[Any] = None, + run_concurrently: Optional[Any] = None, **kwargs ): super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -15626,10 +16006,10 @@ class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): :param compute_type: Compute type of the cluster which will execute data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression with resultType string). - :type compute_type: object + :type compute_type: any :param core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). 
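The compute block for ExecuteDataFlowActivity accepts the documented compute types and core counts quoted above; both parameters may also be expressions. A minimal sketch:

from azure.mgmt.datafactory.models import ExecuteDataFlowActivityTypePropertiesCompute

compute = ExecuteDataFlowActivityTypePropertiesCompute(
    compute_type="MemoryOptimized",  # 'General', 'MemoryOptimized', or 'ComputeOptimized'
    core_count=16,                   # supported: 8, 16, 32, 48, 80, 144, 272
)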
- :type core_count: object + :type core_count: any """ _attribute_map = { @@ -15640,8 +16020,8 @@ class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): def __init__( self, *, - compute_type: Optional[object] = None, - core_count: Optional[object] = None, + compute_type: Optional[Any] = None, + core_count: Optional[Any] = None, **kwargs ): super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) @@ -15649,14 +16029,14 @@ def __init__( self.core_count = core_count -class ExecutePipelineActivity(Activity): +class ExecutePipelineActivity(ControlActivity): """Execute pipeline activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -15670,7 +16050,7 @@ class ExecutePipelineActivity(Activity): :param pipeline: Required. Pipeline reference. :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference :param parameters: Pipeline parameters. - :type parameters: dict[str, object] + :type parameters: dict[str, any] :param wait_on_completion: Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. :type wait_on_completion: bool @@ -15699,11 +16079,11 @@ def __init__( *, name: str, pipeline: "PipelineReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, - parameters: Optional[Dict[str, object]] = None, + parameters: Optional[Dict[str, Any]] = None, wait_on_completion: Optional[bool] = None, **kwargs ): @@ -15721,7 +16101,7 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -15740,13 +16120,13 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). - :type runtime: object + :type runtime: any :param logging_level: The logging level of SSIS package execution. Type: string (or Expression with resultType string). - :type logging_level: object + :type logging_level: any :param environment_path: The environment path to execute the SSIS package. Type: string (or Expression with resultType string). - :type environment_path: object + :type environment_path: any :param execution_credential: The package execution credential. :type execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential :param connect_via: Required. The integration runtime reference. 
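ExecutePipelineActivity is rebased from Activity onto ControlActivity above with no change to its construction surface. A sketch with placeholder names; PipelineReference is assumed to take reference_name as in this SDK vintage:

from azure.mgmt.datafactory.models import ExecutePipelineActivity, PipelineReference

act = ExecutePipelineActivity(
    name="RunChild",
    pipeline=PipelineReference(reference_name="childPipeline"),  # placeholder pipeline name
    parameters={"runDate": "2021-07-08"},  # dict[str, any] passed to the child run
    wait_on_completion=True,               # block until the child pipeline finishes
)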
@@ -15757,10 +16137,12 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type package_parameters: dict[str, ~azure.mgmt.datafactory.models.SSISExecutionParameter] :param project_connection_managers: The project level connection managers to execute the SSIS package. - :type project_connection_managers: dict[str, object] + :type project_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] :param package_connection_managers: The package level connection managers to execute the SSIS package. - :type package_connection_managers: dict[str, object] + :type package_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] :param property_overrides: The property overrides to execute the SSIS package. :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] :param log_location: SSIS package execution log location. @@ -15791,8 +16173,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } @@ -15803,20 +16185,20 @@ def __init__( name: str, package_location: "SSISPackageLocation", connect_via: "IntegrationRuntimeReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - runtime: Optional[object] = None, - logging_level: Optional[object] = None, - environment_path: Optional[object] = None, + runtime: Optional[Any] = None, + logging_level: Optional[Any] = None, + environment_path: Optional[Any] = None, execution_credential: Optional["SSISExecutionCredential"] = None, project_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, package_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, - project_connection_managers: Optional[Dict[str, object]] = None, - package_connection_managers: Optional[Dict[str, object]] = None, + project_connection_managers: Optional[Dict[str, Dict[str, "SSISExecutionParameter"]]] = None, + package_connection_managers: Optional[Dict[str, Dict[str, "SSISExecutionParameter"]]] = None, property_overrides: Optional[Dict[str, "SSISPropertyOverride"]] = None, log_location: Optional["SSISLogLocation"] = None, **kwargs @@ -15954,7 +16336,7 @@ class Expression(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. 
Expression type. Default value: "Expression". + :ivar type: Expression type. Has constant value: "Expression". :vartype type: str :param value: Required. Expression value. :type value: str @@ -16052,7 +16434,7 @@ class Factory(Resource): :vartype e_tag: str :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param identity: Managed service identity of the factory. :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity :ivar provisioning_state: Factory provisioning state, example Succeeded. @@ -16105,7 +16487,7 @@ def __init__( *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, identity: Optional["FactoryIdentity"] = None, repo_configuration: Optional["FactoryRepoConfiguration"] = None, global_parameters: Optional[Dict[str, "GlobalParameterSpecification"]] = None, @@ -16257,7 +16639,7 @@ class FactoryIdentity(msrest.serialization.Model): :ivar tenant_id: The client tenant id of the identity. :vartype tenant_id: str :param user_assigned_identities: List of user assigned identities for the factory. - :type user_assigned_identities: dict[str, object] + :type user_assigned_identities: dict[str, any] """ _validation = { @@ -16277,7 +16659,7 @@ def __init__( self, *, type: Union[str, "FactoryIdentityType"], - user_assigned_identities: Optional[Dict[str, object]] = None, + user_assigned_identities: Optional[Dict[str, Any]] = None, **kwargs ): super(FactoryIdentity, self).__init__(**kwargs) @@ -16439,7 +16821,7 @@ class FileServerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -16449,19 +16831,19 @@ class FileServerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. Host name of the server. Type: string (or Expression with resultType string). - :type host: object + :type host: any :param user_id: User ID to logon the server. Type: string (or Expression with resultType string). - :type user_id: object + :type user_id: any :param password: Password to logon the server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
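user_assigned_identities on FactoryIdentity above is a plain dict keyed by identity resource ID. A sketch with a placeholder ARM resource ID; the combined identity type string is assumed to be a valid FactoryIdentityType value in this regeneration:

from azure.mgmt.datafactory.models import FactoryIdentity

identity = FactoryIdentity(
    type="SystemAssigned,UserAssigned",  # assumed FactoryIdentityType value
    user_assigned_identities={
        # placeholder resource ID of a user-assigned managed identity
        "/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.ManagedIdentity/userAssignedIdentities/<name>": {},
    },
)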
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -16485,15 +16867,15 @@ class FileServerLinkedService(LinkedService): def __init__( self, *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, + host: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - user_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + user_id: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -16511,15 +16893,15 @@ class FileServerLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -16536,9 +16918,9 @@ class FileServerLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, **kwargs ): super(FileServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -16552,42 +16934,45 @@ class FileServerReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). 
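The FileServerReadSettings surface in this hunk combines wildcard matching with modified-time filtering; all paths and patterns below are placeholders:

from azure.mgmt.datafactory.models import FileServerReadSettings

read_settings = FileServerReadSettings(
    recursive=True,
    wildcard_folder_path="2021/07/*",                # placeholder folder pattern
    wildcard_file_name="*.csv",                      # placeholder file pattern
    modified_datetime_start="2021-07-01T00:00:00Z",  # string or expression
    disable_metrics_collection=True,                 # new flag in this regeneration
)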
- :type wildcard_file_name: object + :type wildcard_file_name: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any :param file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). - :type file_filter: object + :type file_filter: any """ _validation = { @@ -16598,6 +16983,7 @@ class FileServerReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16613,21 +16999,22 @@ class FileServerReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - file_list_path: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + file_list_path: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, - file_filter: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, + file_filter: Optional[Any] = None, **kwargs ): - super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + 
super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileServerReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -16648,14 +17035,17 @@ class FileServerWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any """ _validation = { @@ -16666,18 +17056,20 @@ class FileServerWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, **kwargs ): - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'FileServerWriteSettings' # type: str @@ -16688,43 +17080,43 @@ class FileShareDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. 
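A short sketch of the new disableMetricsCollection knob threaded through FileServerReadSettings and FileServerWriteSettings above; literal Python values are assumed to be acceptable wherever the docstrings allow "or Expression", and the copy behavior string is illustrative:

    from azure.mgmt.datafactory.models import (
        FileServerReadSettings,
        FileServerWriteSettings,
    )

    read_settings = FileServerReadSettings(
        recursive=True,
        wildcard_file_name="*.csv",
        disable_metrics_collection=True,    # property added by this change
    )
    write_settings = FileServerWriteSettings(
        copy_behavior="PreserveHierarchy",  # illustrative copy behavior
        disable_metrics_collection=True,
    )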
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any :param format: The format of the files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). - :type file_filter: object + :type file_filter: any :param compression: The data compression method used for the file system. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ @@ -16757,19 +17149,19 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, format: Optional["DatasetStorageFormat"] = None, - file_filter: Optional[object] = None, + file_filter: Optional[Any] = None, compression: Optional["DatasetCompression"] = None, **kwargs ): @@ -16791,26 +17183,29 @@ class FileSystemSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any """ _validation = { @@ -16825,22 +17220,24 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, **kwargs ): - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSink' # type: str self.copy_behavior = copy_behavior @@ -16852,21 +17249,24 @@ class FileSystemSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -16882,6 +17282,7 @@ class FileSystemSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -16889,28 +17290,29 @@ class FileSystemSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSource' # type: str self.recursive = recursive self.additional_columns = additional_columns -class FilterActivity(Activity): +class FilterActivity(ControlActivity): """Filter and return results from input array based on the conditions. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. 
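A hedged sketch of the retyped FileSystemSource above, including the additional_columns hook; the AdditionalColumns name/value pair below is hypothetical:

    from azure.mgmt.datafactory.models import AdditionalColumns, FileSystemSource

    source = FileSystemSource(
        recursive=True,
        disable_metrics_collection=False,  # keep metrics collection enabled
        additional_columns=[
            AdditionalColumns(name="load_date", value="@utcnow()"),  # hypothetical column
        ],
    )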
@@ -16951,7 +17353,7 @@ def __init__( name: str, items: "Expression", condition: "Expression", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -16963,14 +17365,14 @@ def __init__( self.condition = condition -class ForEachActivity(Activity): +class ForEachActivity(ControlActivity): """This activity is used for iterating over a collection and executing given activities. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -17019,7 +17421,7 @@ def __init__( name: str, items: "Expression", activities: List["Activity"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -17042,33 +17444,36 @@ class FtpReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string).
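FilterActivity and ForEachActivity now derive from ControlActivity rather than Activity, but their constructors are unchanged; a minimal ForEach over a hypothetical pipeline parameter still looks like this:

    from azure.mgmt.datafactory.models import Expression, ForEachActivity

    loop = ForEachActivity(
        name="ForEachFile",
        items=Expression(value="@pipeline().parameters.fileList"),  # hypothetical parameter
        activities=[],  # the per-item activities would be listed here
    )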
- :type file_list_path: object + :type file_list_path: any :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. :type use_binary_transfer: bool """ @@ -17081,6 +17486,7 @@ class FtpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -17094,19 +17500,20 @@ class FtpReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - file_list_path: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + file_list_path: Optional[Any] = None, use_binary_transfer: Optional[bool] = None, **kwargs ): - super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FtpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -17125,7 +17532,7 @@ class FtpServerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -17135,32 +17542,32 @@ class FtpServerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType string). - :type host: object + :type host: any :param port: The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "Anonymous". 
:type authentication_type: str or ~azure.mgmt.datafactory.models.FtpAuthenticationType :param user_name: Username to log on to the FTP server. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to log on to the FTP server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object + :type enable_ssl: any :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate when connecting over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object + :type enable_server_certificate_validation: any """ _validation = { @@ -17188,19 +17595,19 @@ def __init__( self, *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, + host: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, + annotations: Optional[List[Any]] = None, + port: Optional[Any] = None, authentication_type: Optional[Union[str, "FtpAuthenticationType"]] = None, - user_name: Optional[object] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, - enable_ssl: Optional[object] = None, - enable_server_certificate_validation: Optional[object] = None, + encrypted_credential: Optional[Any] = None, + enable_ssl: Optional[Any] = None, + enable_server_certificate_validation: Optional[Any] = None, **kwargs ): super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -17222,15 +17629,15 @@ class FtpServerLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType
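A minimal sketch of the retyped FtpServerLinkedService above; the host, port, and credentials are placeholders, and SecureString is assumed from the same models namespace:

    from azure.mgmt.datafactory.models import FtpServerLinkedService, SecureString

    ftp_ls = FtpServerLinkedService(
        host="ftp.example.com",                        # required
        port=21,
        authentication_type="Basic",                   # or "Anonymous"
        user_name="ftpuser",
        password=SecureString(value="<placeholder>"),
        enable_ssl=True,
        enable_server_certificate_validation=True,
    )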
- :type file_name: object + :type file_name: any """ _validation = { @@ -17247,9 +17654,9 @@ class FtpServerLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, **kwargs ): super(FtpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -17261,7 +17668,7 @@ class GetDataFactoryOperationStatusResponse(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param status: Status of the operation. :type status: str """ @@ -17274,7 +17681,7 @@ class GetDataFactoryOperationStatusResponse(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, status: Optional[str] = None, **kwargs ): @@ -17290,7 +17697,7 @@ class GetMetadataActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -17308,7 +17715,7 @@ class GetMetadataActivity(ExecutionActivity): :param dataset: Required. GetMetadata activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param field_list: Fields of metadata to get from dataset. - :type field_list: list[object] + :type field_list: list[any] :param store_settings: GetMetadata activity store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: GetMetadata activity format settings. @@ -17341,13 +17748,13 @@ def __init__( *, name: str, dataset: "DatasetReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - field_list: Optional[List[object]] = None, + field_list: Optional[List[Any]] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["FormatReadSettings"] = None, **kwargs @@ -17449,7 +17856,7 @@ class GlobalParameterSpecification(msrest.serialization.Model): "Int", "Float", "Bool", "Array". :type type: str or ~azure.mgmt.datafactory.models.GlobalParameterType :param value: Required. Value of parameter. - :type value: object + :type value: any """ _validation = { @@ -17466,7 +17873,7 @@ def __init__( self, *, type: Union[str, "GlobalParameterType"], - value: object, + value: Any, **kwargs ): super(GlobalParameterSpecification, self).__init__(**kwargs) @@ -17481,7 +17888,7 @@ class GoogleAdWordsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -17491,10 +17898,10 @@ class GoogleAdWordsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param client_customer_id: Required. The Client customer ID of the AdWords account that you want to fetch report data for. - :type client_customer_id: object + :type client_customer_id: any :param developer_token: Required. The developer token associated with the manager account that you use to grant access to the AdWords API. :type developer_token: ~azure.mgmt.datafactory.models.SecretBase @@ -17508,27 +17915,27 @@ class GoogleAdWordsLinkedService(LinkedService): :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param client_secret: The client secret of the google application used to acquire the refresh token. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :type email: object + :type email: any :param key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :type key_file_path: object + :type key_file_path: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -17561,22 +17968,22 @@ class GoogleAdWordsLinkedService(LinkedService): def __init__( self, *, - client_customer_id: object, + client_customer_id: Any, developer_token: "SecretBase", authentication_type: Union[str, "GoogleAdWordsAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, refresh_token: Optional["SecretBase"] = None, - client_id: Optional[object] = None, + client_id: Optional[Any] = None, client_secret: Optional["SecretBase"] = None, - email: Optional[object] = None, - key_file_path: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - encrypted_credential: Optional[object] = None, + email: Optional[Any] = None, + key_file_path: Optional[Any] = None, + trusted_cert_path: Optional[Any] = None, + use_system_trust_store: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -17601,28 +18008,28 @@ class GoogleAdWordsObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
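A hedged sketch of the GoogleAdWordsLinkedService constructor above, using UserAuthentication; every identifier and token below is a placeholder:

    from azure.mgmt.datafactory.models import GoogleAdWordsLinkedService, SecureString

    adwords_ls = GoogleAdWordsLinkedService(
        client_customer_id="123-456-7890",                    # required
        developer_token=SecureString(value="<placeholder>"),  # required SecretBase
        authentication_type="UserAuthentication",
        refresh_token=SecureString(value="<placeholder>"),
        client_id="<oauth-client-id>",
        client_secret=SecureString(value="<placeholder>"),
    )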
- :type table_name: object + :type table_name: any """ _validation = { @@ -17647,14 +18054,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -17669,27 +18076,30 @@ class GoogleAdWordsSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -17702,6 +18112,7 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -17710,16 +18121,17 @@ class GoogleAdWordsSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleAdWordsSource' # type: str self.query = query @@ -17731,7 +18143,7 @@ class GoogleBigQueryLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -17741,15 +18153,15 @@ class GoogleBigQueryLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param project: Required. The default BigQuery project to query against. - :type project: object + :type project: any :param additional_projects: A comma-separated list of public BigQuery projects to access. - :type additional_projects: object + :type additional_projects: any :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. - :type request_google_drive_scope: object + :type request_google_drive_scope: any :param authentication_type: Required. 
The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values include: "ServiceAuthentication", "UserAuthentication". @@ -17760,27 +18172,27 @@ class GoogleBigQueryLinkedService(LinkedService): :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param client_secret: The client secret of the google application used to acquire the refresh token. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param email: The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. - :type email: object + :type email: any :param key_file_path: The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. - :type key_file_path: object + :type key_file_path: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -17813,23 +18225,23 @@ class GoogleBigQueryLinkedService(LinkedService): def __init__( self, *, - project: object, + project: Any, authentication_type: Union[str, "GoogleBigQueryAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - additional_projects: Optional[object] = None, - request_google_drive_scope: Optional[object] = None, + annotations: Optional[List[Any]] = None, + additional_projects: Optional[Any] = None, + request_google_drive_scope: Optional[Any] = None, refresh_token: Optional["SecretBase"] = None, - client_id: Optional[object] = None, + client_id: Optional[Any] = None, client_secret: Optional["SecretBase"] = None, - email: Optional[object] = None, - key_file_path: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - encrypted_credential: Optional[object] = None, + email: Optional[Any] = None, + key_file_path: Optional[Any] = None, + trusted_cert_path: Optional[Any] = None, + use_system_trust_store: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -17855,35 +18267,35 @@ class GoogleBigQueryObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using database + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Google BigQuery. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param dataset: The database name of the Google BigQuery. Type: string (or Expression with resultType string). 
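A minimal sketch of the GoogleBigQueryLinkedService constructor above, using ServiceAuthentication (self-hosted IR only, per the docstring); the project, service-account email, and key path are placeholders:

    from azure.mgmt.datafactory.models import GoogleBigQueryLinkedService

    bigquery_ls = GoogleBigQueryLinkedService(
        project="my-bq-project",                      # required default project to query
        authentication_type="ServiceAuthentication",  # self-hosted IR only
        email="svc@my-bq-project.iam.gserviceaccount.com",
        key_file_path="C:\\keys\\bq.p12",
        use_system_trust_store=True,
    )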
- :type dataset: object + :type dataset: any """ _validation = { @@ -17910,16 +18322,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - dataset: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + dataset: Optional[Any] = None, **kwargs ): super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -17936,27 +18348,30 @@ class GoogleBigQuerySource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -17969,6 +18384,7 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -17977,16 +18393,17 @@ class GoogleBigQuerySource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleBigQuerySource' # type: str self.query = query @@ -17998,7 +18415,7 @@ class GoogleCloudStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -18008,10 +18425,10 @@ class GoogleCloudStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object + :type access_key_id: any :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -18019,11 +18436,11 @@ class GoogleCloudStorageLinkedService(LinkedService): Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. 
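A sketch of GoogleBigQuerySource with the newly added disable_metrics_collection flag; the query text is hypothetical, and query_timeout follows the hh:mm:ss pattern given in the docstring:

    from azure.mgmt.datafactory.models import GoogleBigQuerySource

    bq_source = GoogleBigQuerySource(
        query="SELECT * FROM mydataset.mytable",  # hypothetical query
        query_timeout="02:00:00",
        disable_metrics_collection=True,
    )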
Type: string (or Expression with resultType string). - :type service_url: object + :type service_url: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -18046,15 +18463,15 @@ class GoogleCloudStorageLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - access_key_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + access_key_id: Optional[Any] = None, secret_access_key: Optional["SecretBase"] = None, - service_url: Optional[object] = None, - encrypted_credential: Optional[object] = None, + service_url: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -18072,21 +18489,21 @@ class GoogleCloudStorageLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any :param version: Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). - :type version: object + :type version: any """ _validation = { @@ -18105,11 +18522,11 @@ class GoogleCloudStorageLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - bucket_name: Optional[object] = None, - version: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, + bucket_name: Optional[Any] = None, + version: Optional[Any] = None, **kwargs ): super(GoogleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -18125,42 +18542,45 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. 
:type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
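[Review note, not part of the generated patch] A hedged sketch of the GoogleCloudStorageReadSettings object whose parameters are documented above; all values are placeholders chosen to match the documented types.

    from azure.mgmt.datafactory.models import GoogleCloudStorageReadSettings

    read_settings = GoogleCloudStorageReadSettings(
        recursive=True,
        wildcard_file_name="*.csv",
        prefix="exports/2021/",                    # object-name prefix filter
        modified_datetime_start="2021-01-01T00:00:00Z",
        modified_datetime_end="2021-07-01T00:00:00Z",
        disable_metrics_collection=False,          # new in this patch; default is false
    )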
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -18171,6 +18591,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -18186,21 +18607,22 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - prefix: Optional[object] = None, - file_list_path: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + prefix: Optional[Any] = None, + file_list_path: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, **kwargs ): - super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'GoogleCloudStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -18221,7 +18643,7 @@ class GreenplumLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -18231,16 +18653,16 @@ class GreenplumLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. 
:type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -18262,14 +18684,14 @@ class GreenplumLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -18286,27 +18708,30 @@ class GreenplumSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
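[Review note, not part of the generated patch] Illustrative construction of the GreenplumLinkedService regenerated above; the connection string is a made-up example of the ODBC form the docstring describes.

    from azure.mgmt.datafactory.models import GreenplumLinkedService

    linked_service = GreenplumLinkedService(
        # string, SecureString or AzureKeyVaultSecretReference per the docstring
        connection_string="host=gp.example.com;port=5432;database=dw;uid=etl",
        description="Greenplum DW (illustrative values)",
    )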
- :type query: object + :type query: any """ _validation = { @@ -18319,6 +18744,7 @@ class GreenplumSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18327,16 +18753,17 @@ class GreenplumSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GreenplumSource' # type: str self.query = query @@ -18348,34 +18775,34 @@ class GreenplumTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. 
Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of Greenplum. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -18402,16 +18829,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -18428,7 +18855,7 @@ class HBaseLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -18438,39 +18865,39 @@ class HBaseLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :type host: object + :type host: any :param port: The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. - :type port: object + :type port: any :param http_path: The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version). - :type http_path: object + :type http_path: any :param authentication_type: Required. The authentication mechanism to use to connect to the HBase server. Possible values include: "Anonymous", "Basic". :type authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType :param username: The user name used to connect to the HBase instance. - :type username: object + :type username: any :param password: The password corresponding to the user name. :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
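[Review note, not part of the generated patch] A minimal sketch of the GreenplumTableDataset constructed above, using the recommended schema + table properties instead of the retiring tableName. The linked service reference name is a placeholder, and this assumes LinkedServiceReference takes reference_name as in the rest of this SDK.

    from azure.mgmt.datafactory.models import GreenplumTableDataset, LinkedServiceReference

    dataset = GreenplumTableDataset(
        # reference_name points at an existing linked service in the factory (placeholder)
        linked_service_name=LinkedServiceReference(reference_name="GreenplumLinkedService"),
        schema_type_properties_schema="public",
        table="fact_sales",
    )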
- :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -18502,22 +18929,22 @@ class HBaseLinkedService(LinkedService): def __init__( self, *, - host: object, + host: Any, authentication_type: Union[str, "HBaseAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - http_path: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + port: Optional[Any] = None, + http_path: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + enable_ssl: Optional[Any] = None, + trusted_cert_path: Optional[Any] = None, + allow_host_name_cn_mismatch: Optional[Any] = None, + allow_self_signed_server_cert: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -18542,28 +18969,28 @@ class HBaseObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -18588,14 +19015,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -18610,27 +19037,30 @@ class HBaseSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
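[Review note, not part of the generated patch] Sketch of the HBaseLinkedService regenerated above; host, username and password are placeholders (the host value is the example from the docstring itself).

    from azure.mgmt.datafactory.models import HBaseLinkedService, SecureString

    hbase = HBaseLinkedService(
        host="192.168.222.160",             # example value from the docstring
        port=9090,                          # documented default
        authentication_type="Basic",
        username="hbase_user",              # placeholder
        password=SecureString(value="<password>"),
        enable_ssl=True,
    )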
- :type query: object + :type query: any """ _validation = { @@ -18643,6 +19073,7 @@ class HBaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18651,16 +19082,17 @@ class HBaseSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HBaseSource' # type: str self.query = query @@ -18672,7 +19104,7 @@ class HdfsLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -18682,20 +19114,20 @@ class HdfsLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). - :type authentication_type: object + :type authentication_type: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any :param user_name: User name for Windows authentication. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password for Windows authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -18722,15 +19154,15 @@ class HdfsLinkedService(LinkedService): def __init__( self, *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, + url: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - encrypted_credential: Optional[object] = None, - user_name: Optional[object] = None, + annotations: Optional[List[Any]] = None, + authentication_type: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, **kwargs ): @@ -18750,15 +19182,15 @@ class HdfsLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -18775,9 +19207,9 @@ class HdfsLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, **kwargs ): super(HdfsLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -18791,41 +19223,44 @@ class HdfsReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with resultType string). 
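[Review note, not part of the generated patch] Sketch of the HdfsLinkedService regenerated above, using the endpoint form given in the docstring; the account name and password are placeholders.

    from azure.mgmt.datafactory.models import HdfsLinkedService, SecureString

    hdfs = HdfsLinkedService(
        url="http://myhostname:50070/webhdfs/v1",  # endpoint shape from the docstring
        authentication_type="Windows",             # documented values: Anonymous, Windows
        user_name="DOMAIN\\svc_adf",               # placeholder Windows account
        password=SecureString(value="<password>"),
    )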
- :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any :param distcp_settings: Specifies Distcp-related settings. :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any """ _validation = { @@ -18836,6 +19271,7 @@ class HdfsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -18851,21 +19287,22 @@ class HdfsReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - file_list_path: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + file_list_path: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, distcp_settings: Optional["DistcpSettings"] = None, - delete_files_after_completion: Optional[object] = None, + delete_files_after_completion: Optional[Any] = None, **kwargs ): - super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, 
**kwargs) + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -18886,21 +19323,24 @@ class HdfsSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param distcp_settings: Specifies Distcp-related settings. :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings """ @@ -18915,6 +19355,7 @@ class HdfsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } @@ -18922,15 +19363,16 @@ class HdfsSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, distcp_settings: Optional["DistcpSettings"] = None, **kwargs ): - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsSource' # type: str self.recursive 
= recursive self.distcp_settings = distcp_settings @@ -18943,7 +19385,7 @@ class HDInsightHiveActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -18961,17 +19403,17 @@ class HDInsightHiveActivity(ExecutionActivity): :param storage_linked_services: Storage linked service references. :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] + :type arguments: list[any] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object + :type script_path: any :param script_linked_service: Script linked service reference. :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] + :type defines: dict[str, any] :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] + :type variables: list[any] :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package). :type query_timeout: int @@ -19005,19 +19447,19 @@ def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[List[object]] = None, + arguments: Optional[List[Any]] = None, get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - script_path: Optional[object] = None, + script_path: Optional[Any] = None, script_linked_service: Optional["LinkedServiceReference"] = None, - defines: Optional[Dict[str, object]] = None, - variables: Optional[List[object]] = None, + defines: Optional[Dict[str, Any]] = None, + variables: Optional[List[Any]] = None, query_timeout: Optional[int] = None, **kwargs ): @@ -19040,7 +19482,7 @@ class HDInsightLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -19050,13 +19492,13 @@ class HDInsightLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param cluster_uri: Required. 
HDInsight cluster URI. Type: string (or Expression with resultType string). - :type cluster_uri: object + :type cluster_uri: any :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: HDInsight cluster password. :type password: ~azure.mgmt.datafactory.models.SecretBase :param linked_service_name: The Azure Storage linked service reference. @@ -19067,13 +19509,13 @@ class HDInsightLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. - :type is_esp_enabled: object + :type is_esp_enabled: any :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). - :type file_system: object + :type file_system: any """ _validation = { @@ -19101,19 +19543,19 @@ class HDInsightLinkedService(LinkedService): def __init__( self, *, - cluster_uri: object, - additional_properties: Optional[Dict[str, object]] = None, + cluster_uri: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - user_name: Optional[object] = None, + annotations: Optional[List[Any]] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, linked_service_name: Optional["LinkedServiceReference"] = None, hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, - encrypted_credential: Optional[object] = None, - is_esp_enabled: Optional[object] = None, - file_system: Optional[object] = None, + encrypted_credential: Optional[Any] = None, + is_esp_enabled: Optional[Any] = None, + file_system: Optional[Any] = None, **kwargs ): super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -19135,7 +19577,7 @@ class HDInsightMapReduceActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -19153,19 +19595,19 @@ class HDInsightMapReduceActivity(ExecutionActivity): :param storage_linked_services: Storage linked service references. :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] + :type arguments: list[any] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param class_name: Required. Class name. Type: string (or Expression with resultType string). - :type class_name: object + :type class_name: any :param jar_file_path: Required. 
Jar path. Type: string (or Expression with resultType string). - :type jar_file_path: object + :type jar_file_path: any :param jar_linked_service: Jar linked service reference. :type jar_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param jar_libs: Jar libs. - :type jar_libs: list[object] + :type jar_libs: list[any] :param defines: Allows user to specify defines for the MapReduce job request. - :type defines: dict[str, object] + :type defines: dict[str, any] """ _validation = { @@ -19198,20 +19640,20 @@ def __init__( self, *, name: str, - class_name: object, - jar_file_path: object, - additional_properties: Optional[Dict[str, object]] = None, + class_name: Any, + jar_file_path: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[List[object]] = None, + arguments: Optional[List[Any]] = None, get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, jar_linked_service: Optional["LinkedServiceReference"] = None, - jar_libs: Optional[List[object]] = None, - defines: Optional[Dict[str, object]] = None, + jar_libs: Optional[List[Any]] = None, + defines: Optional[Dict[str, Any]] = None, **kwargs ): super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -19233,7 +19675,7 @@ class HDInsightOnDemandLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -19243,46 +19685,46 @@ class HDInsightOnDemandLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). - :type cluster_size: object + :type cluster_size: any :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). - :type time_to_live: object + :type time_to_live: any :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with resultType string). - :type version: object + :type version: any :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand cluster for storing and processing data. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param host_subscription_id: Required. 
The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). - :type host_subscription_id: object + :type host_subscription_id: any :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: The key for the service principal id. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: string (or Expression with resultType string). - :type cluster_resource_group: object + :type cluster_resource_group: any :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType string). - :type cluster_name_prefix: object + :type cluster_name_prefix: any :param cluster_user_name: The username to access the cluster. Type: string (or Expression with resultType string). - :type cluster_user_name: object + :type cluster_user_name: any :param cluster_password: The password to access the cluster. :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). - :type cluster_ssh_user_name: object + :type cluster_ssh_user_name: any :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight @@ -19294,56 +19736,55 @@ class HDInsightOnDemandLinkedService(LinkedService): as the metastore. :type hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param cluster_type: The cluster type. Type: string (or Expression with resultType string). - :type cluster_type: object + :type cluster_type: any :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). - :type spark_version: object + :type spark_version: any :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. - :type core_configuration: object + :type core_configuration: any :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. - :type h_base_configuration: object + :type h_base_configuration: any :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. - :type hdfs_configuration: object + :type hdfs_configuration: any :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. - :type hive_configuration: object - :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred- - site.xml) for the HDInsight cluster. - :type map_reduce_configuration: object + :type hive_configuration: any + :param map_reduce_configuration: Specifies the MapReduce configuration parameters + (mapred-site.xml) for the HDInsight cluster. 
+ :type map_reduce_configuration: any :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. - :type oozie_configuration: object + :type oozie_configuration: any :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. - :type storm_configuration: object + :type storm_configuration: any :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. - :type yarn_configuration: object + :type yarn_configuration: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param head_node_size: Specifies the size of the head node for the HDInsight cluster. - :type head_node_size: object + :type head_node_size: any :param data_node_size: Specifies the size of the data node for the HDInsight cluster. - :type data_node_size: object + :type data_node_size: any :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight cluster. - :type zookeeper_node_size: object + :type zookeeper_node_size: any :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. - Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- - cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- - us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + Please refer to + https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). - :type virtual_network_id: object + :type virtual_network_id: any :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). 
- :type subnet_name: object + :type subnet_name: any """ _validation = { @@ -19402,44 +19843,44 @@ class HDInsightOnDemandLinkedService(LinkedService): def __init__( self, *, - cluster_size: object, - time_to_live: object, - version: object, + cluster_size: Any, + time_to_live: Any, + version: Any, linked_service_name: "LinkedServiceReference", - host_subscription_id: object, - tenant: object, - cluster_resource_group: object, - additional_properties: Optional[Dict[str, object]] = None, + host_subscription_id: Any, + tenant: Any, + cluster_resource_group: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - service_principal_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - cluster_name_prefix: Optional[object] = None, - cluster_user_name: Optional[object] = None, + cluster_name_prefix: Optional[Any] = None, + cluster_user_name: Optional[Any] = None, cluster_password: Optional["SecretBase"] = None, - cluster_ssh_user_name: Optional[object] = None, + cluster_ssh_user_name: Optional[Any] = None, cluster_ssh_password: Optional["SecretBase"] = None, additional_linked_service_names: Optional[List["LinkedServiceReference"]] = None, hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, - cluster_type: Optional[object] = None, - spark_version: Optional[object] = None, - core_configuration: Optional[object] = None, - h_base_configuration: Optional[object] = None, - hdfs_configuration: Optional[object] = None, - hive_configuration: Optional[object] = None, - map_reduce_configuration: Optional[object] = None, - oozie_configuration: Optional[object] = None, - storm_configuration: Optional[object] = None, - yarn_configuration: Optional[object] = None, - encrypted_credential: Optional[object] = None, - head_node_size: Optional[object] = None, - data_node_size: Optional[object] = None, - zookeeper_node_size: Optional[object] = None, + cluster_type: Optional[Any] = None, + spark_version: Optional[Any] = None, + core_configuration: Optional[Any] = None, + h_base_configuration: Optional[Any] = None, + hdfs_configuration: Optional[Any] = None, + hive_configuration: Optional[Any] = None, + map_reduce_configuration: Optional[Any] = None, + oozie_configuration: Optional[Any] = None, + storm_configuration: Optional[Any] = None, + yarn_configuration: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, + head_node_size: Optional[Any] = None, + data_node_size: Optional[Any] = None, + zookeeper_node_size: Optional[Any] = None, script_actions: Optional[List["ScriptAction"]] = None, - virtual_network_id: Optional[object] = None, - subnet_name: Optional[object] = None, + virtual_network_id: Optional[Any] = None, + subnet_name: Optional[Any] = None, **kwargs ): super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -19486,7 +19927,7 @@ class HDInsightPigActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
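[Review note, not part of the generated patch] A hedged sketch of the HDInsightOnDemandLinkedService above with only its required parameters plus a cluster type; subscription, tenant, resource group and the storage linked service name are all placeholders.

    from azure.mgmt.datafactory.models import (
        HDInsightOnDemandLinkedService,
        LinkedServiceReference,
    )

    on_demand = HDInsightOnDemandLinkedService(
        cluster_size=4,                  # suggestion value from the docstring
        time_to_live="00:05:00",         # minimum allowed idle time is 5 minutes
        version="4.0",                   # placeholder HDInsight version
        linked_service_name=LinkedServiceReference(reference_name="AzureStorageLinkedService"),
        host_subscription_id="<subscription-id>",
        tenant="<tenant-id>",
        cluster_resource_group="my-resource-group",
        cluster_type="spark",
    )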
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -19505,15 +19946,15 @@ class HDInsightPigActivity(ExecutionActivity): :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). - :type arguments: object + :type arguments: any :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object + :type script_path: any :param script_linked_service: Script linked service reference. :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] + :type defines: dict[str, any] """ _validation = { @@ -19542,18 +19983,18 @@ def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[object] = None, + arguments: Optional[Any] = None, get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - script_path: Optional[object] = None, + script_path: Optional[Any] = None, script_linked_service: Optional["LinkedServiceReference"] = None, - defines: Optional[Dict[str, object]] = None, + defines: Optional[Dict[str, Any]] = None, **kwargs ): super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -19573,7 +20014,7 @@ class HDInsightSparkActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -19590,12 +20031,12 @@ class HDInsightSparkActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). - :type root_path: object + :type root_path: any :param entry_file_path: Required. The relative path to the root folder of the code/package to be executed. Type: string (or Expression with resultType string). - :type entry_file_path: object + :type entry_file_path: any :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] + :type arguments: list[any] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". 
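[Review note, not part of the generated patch] Minimal sketch of the HDInsightPigActivity regenerated above; the script path and defines are placeholder values.

    from azure.mgmt.datafactory.models import HDInsightPigActivity

    pig = HDInsightPigActivity(
        name="RunPigScript",
        script_path="scripts/transform.pig",  # placeholder script location
        defines={"inputPath": "wasb://data@account.blob.core.windows.net/raw"},
    )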
:type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param spark_job_linked_service: The storage linked service for uploading the entry file and @@ -19605,9 +20046,9 @@ class HDInsightSparkActivity(ExecutionActivity): :type class_name: str :param proxy_user: The user to impersonate that will execute the job. Type: string (or Expression with resultType string). - :type proxy_user: object + :type proxy_user: any :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] + :type spark_config: dict[str, any] """ _validation = { @@ -19640,20 +20081,20 @@ def __init__( self, *, name: str, - root_path: object, - entry_file_path: object, - additional_properties: Optional[Dict[str, object]] = None, + root_path: Any, + entry_file_path: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - arguments: Optional[List[object]] = None, + arguments: Optional[List[Any]] = None, get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, spark_job_linked_service: Optional["LinkedServiceReference"] = None, class_name: Optional[str] = None, - proxy_user: Optional[object] = None, - spark_config: Optional[Dict[str, object]] = None, + proxy_user: Optional[Any] = None, + spark_config: Optional[Dict[str, Any]] = None, **kwargs ): super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -19675,7 +20116,7 @@ class HDInsightStreamingActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -19693,29 +20134,29 @@ class HDInsightStreamingActivity(ExecutionActivity): :param storage_linked_services: Storage linked service references. :type storage_linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] + :type arguments: list[any] :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType string). - :type mapper: object + :type mapper: any :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType string). - :type reducer: object + :type reducer: any :param input: Required. Input blob path. Type: string (or Expression with resultType string). - :type input: object + :type input: any :param output: Required. Output blob path. Type: string (or Expression with resultType string). - :type output: object + :type output: any :param file_paths: Required. Paths to streaming job files. Can be directories. 
- :type file_paths: list[object] + :type file_paths: list[any] :param file_linked_service: Linked service reference where the files are located. :type file_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param combiner: Combiner executable name. Type: string (or Expression with resultType string). - :type combiner: object + :type combiner: any :param command_environment: Command line environment values. - :type command_environment: list[object] + :type command_environment: list[any] :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, object] + :type defines: dict[str, any] """ _validation = { @@ -19755,24 +20196,24 @@ def __init__( self, *, name: str, - mapper: object, - reducer: object, - input: object, - output: object, - file_paths: List[object], - additional_properties: Optional[Dict[str, object]] = None, + mapper: Any, + reducer: Any, + input: Any, + output: Any, + file_paths: List[Any], + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[List[object]] = None, + arguments: Optional[List[Any]] = None, get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, file_linked_service: Optional["LinkedServiceReference"] = None, - combiner: Optional[object] = None, - command_environment: Optional[List[object]] = None, - defines: Optional[Dict[str, object]] = None, + combiner: Optional[Any] = None, + command_environment: Optional[List[Any]] = None, + defines: Optional[Dict[str, Any]] = None, **kwargs ): super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -19798,7 +20239,7 @@ class HiveLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -19808,12 +20249,12 @@ class HiveLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enable). - :type host: object + :type host: any :param port: The TCP port that the Hive server uses to listen for client connections. - :type port: object + :type port: any :param server_type: The type of Hive server. Possible values include: "HiveServer1", "HiveServer2", "HiveThriftServer". :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType @@ -19826,40 +20267,40 @@ class HiveLinkedService(LinkedService): "WindowsAzureHDInsightService". 
:type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. - :type service_discovery_mode: object + :type service_discovery_mode: any :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are added. - :type zoo_keeper_name_space: object + :type zoo_keeper_name_space: any :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts them into an equivalent form in HiveQL. - :type use_native_query: object + :type use_native_query: any :param username: The user name that you use to access Hive Server. - :type username: object + :type username: any :param password: The password corresponding to the user name that you provided in the Username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object + :type http_path: any :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
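Given the parameters documented above (the constructor itself follows in the next hunk), a sketch of building a Hive linked service; the host names are placeholders and the authentication type is one of the enum values listed in the docstring:

    from azure.mgmt.datafactory import models

    hive_ls = models.HiveLinkedService(
        host="hive-node1.contoso.com;hive-node2.contoso.com",  # ';'-separated when serviceDiscoveryMode is enabled
        authentication_type="Anonymous",
        port=10000,                          # Any: literal int or an Expression dict
        enable_ssl=True,
        allow_host_name_cn_mismatch=False,
    )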
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -19897,28 +20338,28 @@ class HiveLinkedService(LinkedService): def __init__( self, *, - host: object, + host: Any, authentication_type: Union[str, "HiveAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, + annotations: Optional[List[Any]] = None, + port: Optional[Any] = None, server_type: Optional[Union[str, "HiveServerType"]] = None, thrift_transport_protocol: Optional[Union[str, "HiveThriftTransportProtocol"]] = None, - service_discovery_mode: Optional[object] = None, - zoo_keeper_name_space: Optional[object] = None, - use_native_query: Optional[object] = None, - username: Optional[object] = None, + service_discovery_mode: Optional[Any] = None, + zoo_keeper_name_space: Optional[Any] = None, + use_native_query: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - http_path: Optional[object] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + http_path: Optional[Any] = None, + enable_ssl: Optional[Any] = None, + trusted_cert_path: Optional[Any] = None, + use_system_trust_store: Optional[Any] = None, + allow_host_name_cn_mismatch: Optional[Any] = None, + allow_self_signed_server_cert: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -19949,34 +20390,34 @@ class HiveObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. 
Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Hive. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -20003,16 +20444,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -20029,27 +20470,30 @@ class HiveSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -20062,6 +20506,7 @@ class HiveSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -20070,16 +20515,17 @@ class HiveSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HiveSource' # type: str self.query = query @@ -20091,41 +20537,41 @@ class HttpDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
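HiveSource now threads the new disable_metrics_collection flag into TabularSource via the super().__init__ call shown above, so it can be passed alongside the existing retry and timeout settings. A sketch, with illustrative query text:

    from azure.mgmt.datafactory import models

    hive_source = models.HiveSource(
        query="SELECT id, ts FROM telemetry",   # Any: string or Expression
        query_timeout="02:00:00",               # matches ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))
        disable_metrics_collection=True,        # new in this API version; default is false
    )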
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). - :type relative_url: object + :type relative_url: any :param request_method: The HTTP method for the HTTP request. Type: string (or Expression with resultType string). - :type request_method: object + :type request_method: any :param request_body: The body for the HTTP request. Type: string (or Expression with resultType string). - :type request_body: object - :param additional_headers: The headers for the HTTP Request. e.g. request-header- - name-1:request-header-value-1 + :type request_body: any + :param additional_headers: The headers for the HTTP Request. e.g. + request-header-name-1:request-header-value-1 ... request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). - :type additional_headers: object + :type additional_headers: any :param format: The format of files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used on files. @@ -20159,17 +20605,17 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - relative_url: Optional[object] = None, - request_method: Optional[object] = None, - request_body: Optional[object] = None, - additional_headers: Optional[object] = None, + relative_url: Optional[Any] = None, + request_method: Optional[Any] = None, + request_body: Optional[Any] = None, + additional_headers: Optional[Any] = None, format: Optional["DatasetStorageFormat"] = None, compression: Optional["DatasetCompression"] = None, **kwargs @@ -20191,7 +20637,7 @@ class HttpLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -20201,39 +20647,39 @@ class HttpLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: The authentication type to be used to connect to the HTTP server. Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". :type authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). 
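A sketch of the HttpDataset constructor shown above; the linked service name and URL fragment are placeholders. Note the additionalHeaders wire format the docstring describes, one name:value pair per line:

    from azure.mgmt.datafactory import models

    http_ds = models.HttpDataset(
        linked_service_name=models.LinkedServiceReference(
            type="LinkedServiceReference",       # assumed required in this SDK version
            reference_name="HttpServerLinkedService",
        ),
        relative_url="data/latest.csv",
        request_method="GET",
        additional_headers="x-ms-client:adf\nx-trace-id:123",  # name:value pairs, newline-separated
    )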
- :type user_name: object + :type user_name: any :param password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :type auth_headers: object + :type auth_headers: any :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). - :type embedded_cert_data: object + :type embedded_cert_data: any :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). - :type cert_thumbprint: object + :type cert_thumbprint: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param enable_server_certificate_validation: If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object + :type enable_server_certificate_validation: any """ _validation = { @@ -20262,20 +20708,20 @@ class HttpLinkedService(LinkedService): def __init__( self, *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, + url: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, authentication_type: Optional[Union[str, "HttpAuthenticationType"]] = None, - user_name: Optional[object] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - auth_headers: Optional[object] = None, - embedded_cert_data: Optional[object] = None, - cert_thumbprint: Optional[object] = None, - encrypted_credential: Optional[object] = None, - enable_server_certificate_validation: Optional[object] = None, + auth_headers: Optional[Any] = None, + embedded_cert_data: Optional[Any] = None, + cert_thumbprint: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, + enable_server_certificate_validation: Optional[Any] = None, **kwargs ): super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -20298,29 +20744,32 @@ class HttpReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: object + :type request_method: any :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: object + :type request_body: any :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object + :type additional_headers: any :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. - :type request_timeout: object + :type request_timeout: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any """ _validation = { @@ -20331,6 +20780,7 @@ class HttpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -20342,17 +20792,18 @@ class HttpReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - request_method: Optional[object] = None, - request_body: Optional[object] = None, - additional_headers: Optional[object] = None, - request_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + request_method: Optional[Any] = None, + request_body: Optional[Any] = None, + additional_headers: Optional[Any] = None, + request_timeout: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, + partition_root_path: Optional[Any] = None, **kwargs ): - super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpReadSettings' # type: str self.request_method = request_method self.request_body = request_body @@ -20369,18 +20820,18 @@ class HttpServerLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
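The new field maps to the camelCase wire name disableMetricsCollection in _attribute_map above, so it serializes like every other Any-typed property. A quick check, assuming msrest's Model.serialize() behaves here as it does elsewhere in this package:

    from azure.mgmt.datafactory import models

    settings = models.HttpReadSettings(request_method="GET", disable_metrics_collection=True)
    body = settings.serialize()
    # expected shape: {"type": "HttpReadSettings", "requestMethod": "GET",
    #                  "disableMetricsCollection": True}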
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param relative_url: Specify the relativeUrl of http server. Type: string (or Expression with resultType string). - :type relative_url: object + :type relative_url: any """ _validation = { @@ -20398,10 +20849,10 @@ class HttpServerLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - relative_url: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, + relative_url: Optional[Any] = None, **kwargs ): super(HttpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -20416,23 +20867,26 @@ class HttpSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -20445,20 +20899,22 @@ class HttpSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - http_request_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + http_request_timeout: Optional[Any] = None, **kwargs ): - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpSource' # type: str self.http_request_timeout = http_request_timeout @@ -20470,7 +20926,7 @@ class HubspotLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -20480,9 +20936,9 @@ class HubspotLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param client_id: Required. The client ID associated with your Hubspot application. - :type client_id: object + :type client_id: any :param client_secret: The client secret associated with your Hubspot application. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param access_token: The access token obtained when initially authenticating your OAuth @@ -20493,18 +20949,18 @@ class HubspotLinkedService(LinkedService): :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. 
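HttpSource gains the same flag and passes it through CopySource, per the super().__init__ call above; http_request_timeout keeps the timespan pattern documented in its docstring. A sketch:

    from azure.mgmt.datafactory import models

    http_source = models.HttpSource(
        http_request_timeout="00:01:40",     # timespan string; default mirrors System.Net.HttpWebRequest.Timeout
        disable_metrics_collection=False,
    )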
The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -20532,19 +20988,19 @@ class HubspotLinkedService(LinkedService): def __init__( self, *, - client_id: object, - additional_properties: Optional[Dict[str, object]] = None, + client_id: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, client_secret: Optional["SecretBase"] = None, access_token: Optional["SecretBase"] = None, refresh_token: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -20566,28 +21022,28 @@ class HubspotObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -20612,14 +21068,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -20634,27 +21090,30 @@ class HubspotSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -20667,6 +21126,7 @@ class HubspotSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -20675,28 +21135,29 @@ class HubspotSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HubspotSource' # type: str self.query = query -class IfConditionActivity(Activity): +class IfConditionActivity(ControlActivity): """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -20742,7 +21203,7 @@ def __init__( *, name: str, expression: "Expression", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -20764,7 +21225,7 @@ class ImpalaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
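Besides the Any migration, the hunk above re-parents IfConditionActivity from Activity to ControlActivity; the constructor surface is unchanged, so existing call sites keep working. A sketch with a placeholder expression and branch; WaitActivity and the if_true_activities parameter name are assumptions (the docstring names the ifTrueActivities property, and any Activity subclass from this module would do as a branch):

    from azure.mgmt.datafactory import models

    branch = models.IfConditionActivity(
        name="CheckRowCount",
        expression=models.Expression(value="@greater(activity('Lookup1').output.count, 0)"),
        if_true_activities=[models.WaitActivity(name="Proceed", wait_time_in_seconds=1)],
    )
    # the re-parenting is purely a type-hierarchy change:
    assert isinstance(branch, models.ControlActivity)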
@@ -20774,41 +21235,41 @@ class ImpalaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The IP address or host name of the Impala server. (i.e. 192.168.222.160). - :type host: object + :type host: any :param port: The TCP port that the Impala server uses to listen for client connections. The default value is 21050. - :type port: object + :type port: any :param authentication_type: Required. The authentication type to use. Possible values include: "Anonymous", "SASLUsername", "UsernameAndPassword". :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType :param username: The user name used to access the Impala server. The default value is anonymous when using SASLUsername. - :type username: object + :type username: any :param password: The password corresponding to the user name when using UsernameAndPassword. :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -20840,22 +21301,22 @@ class ImpalaLinkedService(LinkedService): def __init__( self, *, - host: object, + host: Any, authentication_type: Union[str, "ImpalaAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + port: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + enable_ssl: Optional[Any] = None, + trusted_cert_path: Optional[Any] = None, + use_system_trust_store: Optional[Any] = None, + allow_host_name_cn_mismatch: Optional[Any] = None, + allow_self_signed_server_cert: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -20880,35 +21341,35 @@ class ImpalaObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Impala. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Impala. Type: string (or Expression with resultType string). 
- :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -20935,16 +21396,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -20961,27 +21422,30 @@ class ImpalaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
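The dataset docstrings above flag table_name as slated for retirement in favor of the split table plus schema_type_properties_schema properties. A before/after sketch for the Impala dataset, with placeholder names:

    from azure.mgmt.datafactory import models

    ls_ref = models.LinkedServiceReference(
        type="LinkedServiceReference", reference_name="ImpalaLinkedService",
    )

    # Retiring form:
    old_ds = models.ImpalaObjectDataset(linked_service_name=ls_ref, table_name="sales.orders")
    # Preferred form:
    new_ds = models.ImpalaObjectDataset(
        linked_service_name=ls_ref,
        schema_type_properties_schema="sales",
        table="orders",
    )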
- :type query: object + :type query: any """ _validation = { @@ -20994,6 +21458,7 @@ class ImpalaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -21002,16 +21467,17 @@ class ImpalaSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ImpalaSource' # type: str self.query = query @@ -21023,7 +21489,7 @@ class InformixLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -21033,27 +21499,27 @@ class InformixLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param authentication_type: Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. 
+ :type authentication_type: any + :param credential: The access credential portion of the connection string specified in + driver-specific property-value format. :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -21079,17 +21545,17 @@ class InformixLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, + annotations: Optional[List[Any]] = None, + authentication_type: Optional[Any] = None, credential: Optional["SecretBase"] = None, - user_name: Optional[object] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -21109,27 +21575,30 @@ class InformixSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
- :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -21144,22 +21613,24 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, **kwargs ): - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'InformixSink' # type: str self.pre_copy_script = pre_copy_script @@ -21171,26 +21642,29 @@ class InformixSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -21203,6 +21677,7 @@ class InformixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -21211,16 +21686,17 @@ class InformixSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'InformixSource' # type: str self.query = query @@ -21232,29 +21708,29 @@ class InformixTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Informix table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -21279,14 +21755,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -21304,7 +21780,7 @@ class IntegrationRuntime(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -21329,7 +21805,7 @@ class IntegrationRuntime(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, **kwargs ): @@ -21370,10 +21846,10 @@ class IntegrationRuntimeComputeProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param location: The location for managed integration runtime. The supported regions could be - found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement- - activities. + found on + https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities. :type location: str :param node_size: The node size requirement to managed integration runtime. :type node_size: str @@ -21406,7 +21882,7 @@ class IntegrationRuntimeComputeProperties(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, location: Optional[str] = None, node_size: Optional[str] = None, number_of_nodes: Optional[int] = None, @@ -21432,7 +21908,7 @@ class IntegrationRuntimeConnectionInfo(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar service_token: The token generated in service. Callers use this token to authenticate to integration runtime. 
:vartype service_token: str @@ -21472,7 +21948,7 @@ class IntegrationRuntimeConnectionInfo(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) @@ -21517,7 +21993,7 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param compute_type: Compute type of the cluster which will execute data flow job. Possible values include: "General", "MemoryOptimized", "ComputeOptimized". :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType @@ -21543,7 +22019,7 @@ class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, core_count: Optional[int] = None, time_to_live: Optional[int] = None, @@ -21708,7 +22184,7 @@ class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar node_name: Name of the integration runtime node. :vartype node_name: str :ivar available_memory_in_mb: Available memory (MB) on the integration runtime node. @@ -21754,7 +22230,7 @@ class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) @@ -21769,6 +22245,103 @@ def __init__( self.received_bytes = None +class IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints for one category. + + :param category: The category of outbound network dependency. + :type category: str + :param endpoints: The endpoints for outbound network dependency. + :type endpoints: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpoint] + """ + + _attribute_map = { + 'category': {'key': 'category', 'type': 'str'}, + 'endpoints': {'key': 'endpoints', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpoint]'}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + endpoints: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpoint"]] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint, self).__init__(**kwargs) + self.category = category + self.endpoints = endpoints + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpoint(msrest.serialization.Model): + """The endpoint for Azure-SSIS integration runtime outbound network dependency. + + :param domain_name: The domain name of endpoint. + :type domain_name: str + :param endpoint_details: The details of endpoint. 
+ :type endpoint_details: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails] + """ + + _attribute_map = { + 'domain_name': {'key': 'domainName', 'type': 'str'}, + 'endpoint_details': {'key': 'endpointDetails', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails]'}, + } + + def __init__( + self, + *, + domain_name: Optional[str] = None, + endpoint_details: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails"]] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpoint, self).__init__(**kwargs) + self.domain_name = domain_name + self.endpoint_details = endpoint_details + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(msrest.serialization.Model): + """The details of Azure-SSIS integration runtime outbound network dependency endpoint. + + :param port: The port of endpoint. + :type port: int + """ + + _attribute_map = { + 'port': {'key': 'port', 'type': 'int'}, + } + + def __init__( + self, + *, + port: Optional[int] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails, self).__init__(**kwargs) + self.port = port + + +class IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(msrest.serialization.Model): + """Azure-SSIS integration runtime outbound network dependency endpoints. + + :param value: The list of outbound network dependency endpoints. + :type value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint]'}, + } + + def __init__( + self, + *, + value: Optional[List["IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint"]] = None, + **kwargs + ): + super(IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, self).__init__(**kwargs) + self.value = value + + class IntegrationRuntimeReference(msrest.serialization.Model): """Integration runtime reference type. @@ -21776,13 +22349,12 @@ class IntegrationRuntimeReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference". + :ivar type: Type of integration runtime. Has constant value: "IntegrationRuntimeReference". :vartype type: str :param reference_name: Required. Reference integration runtime name. :type reference_name: str :param parameters: Arguments for integration runtime. - :type parameters: dict[str, object] + :type parameters: dict[str, any] """ _validation = { @@ -21802,7 +22374,7 @@ def __init__( self, *, reference_name: str, - parameters: Optional[Dict[str, object]] = None, + parameters: Optional[Dict[str, Any]] = None, **kwargs ): super(IntegrationRuntimeReference, self).__init__(**kwargs) @@ -21882,7 +22454,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param catalog_server_endpoint: The catalog database server URL. :type catalog_server_endpoint: str :param catalog_admin_user_name: The administrator user name of catalog database. 
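
The hunks above introduce four nested models for the new Azure-SSIS outbound network dependency listing. As a hedged illustration (my sketch, not part of the generated patch; the domain name and category strings are invented for the example), this shows how the new classes compose bottom-up and how a caller might walk a response:

# Sketch only: composes the four models added in this patch. The concrete
# domain/category values below are illustrative, not taken from the service.
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint,
    IntegrationRuntimeOutboundNetworkDependenciesEndpoint,
    IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails,
    IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse,
)

# Build bottom-up: details -> endpoint -> category -> response.
details = IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails(port=443)
endpoint = IntegrationRuntimeOutboundNetworkDependenciesEndpoint(
    domain_name="*.servicebus.windows.net",  # illustrative domain only
    endpoint_details=[details],
)
category = IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint(
    category="Azure Service Bus",  # illustrative category only
    endpoints=[endpoint],
)
response = IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse(
    value=[category],
)

# All fields are optional, so guard against None when traversing,
# e.g. to report which outbound domains and ports must be reachable:
for cat in response.value or []:
    for ep in cat.endpoints or []:
        ports = [d.port for d in ep.endpoint_details or []]
        print(cat.category, ep.domain_name, ports)
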
@@ -21916,7 +22488,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, catalog_server_endpoint: Optional[str] = None, catalog_admin_user_name: Optional[str] = None, catalog_admin_password: Optional["SecureString"] = None, @@ -21938,7 +22510,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param catalog_info: Catalog information for managed dedicated integration runtime. :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo :param license_type: License type for bringing your own license scenario. Possible values @@ -21960,6 +22532,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] :param package_stores: Package stores for the SSIS Integration Runtime. :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] + :param managed_credential: The user-assigned managed identity reference. + :type managed_credential: ~azure.mgmt.datafactory.models.EntityReference """ _attribute_map = { @@ -21971,12 +22545,13 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, + 'managed_credential': {'key': 'managedCredential', 'type': 'EntityReference'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, catalog_info: Optional["IntegrationRuntimeSsisCatalogInfo"] = None, license_type: Optional[Union[str, "IntegrationRuntimeLicenseType"]] = None, custom_setup_script_properties: Optional["IntegrationRuntimeCustomSetupScriptProperties"] = None, @@ -21984,6 +22559,7 @@ def __init__( edition: Optional[Union[str, "IntegrationRuntimeEdition"]] = None, express_custom_setup_properties: Optional[List["CustomSetupBase"]] = None, package_stores: Optional[List["PackageStore"]] = None, + managed_credential: Optional["EntityReference"] = None, **kwargs ): super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) @@ -21995,6 +22571,7 @@ def __init__( self.edition = edition self.express_custom_setup_properties = express_custom_setup_properties self.package_stores = package_stores + self.managed_credential = managed_credential class IntegrationRuntimeStatus(msrest.serialization.Model): @@ -22009,7 +22586,7 @@ class IntegrationRuntimeStatus(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". 
:type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -22041,7 +22618,7 @@ class IntegrationRuntimeStatus(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(IntegrationRuntimeStatus, self).__init__(**kwargs) @@ -22122,7 +22699,7 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param v_net_id: The ID of the VNet that this integration runtime will join. :type v_net_id: str :param subnet: The name of the subnet this integration runtime will join. @@ -22130,6 +22707,9 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will use. :type public_i_ps: list[str] + :param subnet_id: The ID of subnet, to which this Azure-SSIS integration runtime will be + joined. + :type subnet_id: str """ _attribute_map = { @@ -22137,15 +22717,17 @@ class IntegrationRuntimeVNetProperties(msrest.serialization.Model): 'v_net_id': {'key': 'vNetId', 'type': 'str'}, 'subnet': {'key': 'subnet', 'type': 'str'}, 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, + 'subnet_id': {'key': 'subnetId', 'type': 'str'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, v_net_id: Optional[str] = None, subnet: Optional[str] = None, public_i_ps: Optional[List[str]] = None, + subnet_id: Optional[str] = None, **kwargs ): super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) @@ -22153,6 +22735,7 @@ def __init__( self.v_net_id = v_net_id self.subnet = subnet self.public_i_ps = public_i_ps + self.subnet_id = subnet_id class JiraLinkedService(LinkedService): @@ -22162,7 +22745,7 @@ class JiraLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -22172,32 +22755,32 @@ class JiraLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The IP address or host name of the Jira service. (e.g. jira.example.com). - :type host: object + :type host: any :param port: The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :type port: object + :type port: any :param username: Required. The user name that you use to access Jira Service. - :type username: object + :type username: any :param password: The password corresponding to the user name that you provided in the username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
- :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -22226,19 +22809,19 @@ class JiraLinkedService(LinkedService): def __init__( self, *, - host: object, - username: object, - additional_properties: Optional[Dict[str, object]] = None, + host: Any, + username: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, + annotations: Optional[List[Any]] = None, + port: Optional[Any] = None, password: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -22260,28 +22843,28 @@ class JiraObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -22306,14 +22889,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -22328,27 +22911,30 @@ class JiraSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -22361,6 +22947,7 @@ class JiraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -22369,16 +22956,17 @@ class JiraSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'JiraSource' # type: str self.query = query @@ -22390,23 +22978,23 @@ class JsonDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -22417,7 +23005,7 @@ class JsonDataset(Dataset): of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: object + :type encoding_name: any :param compression: The data compression method used for the json dataset. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ @@ -22446,15 +23034,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - encoding_name: Optional[object] = None, + encoding_name: Optional[Any] = None, compression: Optional["DatasetCompression"] = None, **kwargs ): @@ -22472,35 +23060,34 @@ class JsonFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any :param file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~azure.mgmt.datafactory.models.JsonFormatFilePattern + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: any :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). - :type nesting_separator: object + :type nesting_separator: any :param encoding_name: The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). - :type encoding_name: object + :type encoding_name: any :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with resultType string). - :type json_node_reference: object + :type json_node_reference: any :param json_path_definition: The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. 
Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). - :type json_path_definition: object + :type json_path_definition: any """ _validation = { @@ -22512,7 +23099,7 @@ class JsonFormat(DatasetStorageFormat): 'type': {'key': 'type', 'type': 'str'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, @@ -22522,14 +23109,14 @@ class JsonFormat(DatasetStorageFormat): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - serializer: Optional[object] = None, - deserializer: Optional[object] = None, - file_pattern: Optional[Union[str, "JsonFormatFilePattern"]] = None, - nesting_separator: Optional[object] = None, - encoding_name: Optional[object] = None, - json_node_reference: Optional[object] = None, - json_path_definition: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + serializer: Optional[Any] = None, + deserializer: Optional[Any] = None, + file_pattern: Optional[Any] = None, + nesting_separator: Optional[Any] = None, + encoding_name: Optional[Any] = None, + json_node_reference: Optional[Any] = None, + json_path_definition: Optional[Any] = None, **kwargs ): super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) @@ -22548,7 +23135,7 @@ class JsonReadSettings(FormatReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. @@ -22568,7 +23155,7 @@ class JsonReadSettings(FormatReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, compression_properties: Optional["CompressionReadSettings"] = None, **kwargs ): @@ -22584,24 +23171,27 @@ class JsonSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Json format settings. @@ -22620,6 +23210,7 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } @@ -22627,17 +23218,18 @@ class JsonSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["JsonWriteSettings"] = None, **kwargs ): - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -22650,18 +23242,21 @@ class JsonSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Json format settings. @@ -22681,6 +23276,7 @@ class JsonSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -22689,16 +23285,17 @@ class JsonSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["JsonReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -22712,13 +23309,12 @@ class JsonWriteSettings(FormatWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~azure.mgmt.datafactory.models.JsonWriteFilePattern + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. 
+ :type file_pattern: any """ _validation = { @@ -22728,14 +23324,14 @@ class JsonWriteSettings(FormatWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - file_pattern: Optional[Union[str, "JsonWriteFilePattern"]] = None, + additional_properties: Optional[Dict[str, Any]] = None, + file_pattern: Optional[Any] = None, **kwargs ): super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -22986,12 +23582,12 @@ class LinkedServiceReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference". + :ivar type: Linked service reference type. Has constant value: "LinkedServiceReference". :vartype type: str :param reference_name: Required. Reference LinkedService name. :type reference_name: str :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] + :type parameters: dict[str, any] """ _validation = { @@ -23011,7 +23607,7 @@ def __init__( self, *, reference_name: str, - parameters: Optional[Dict[str, object]] = None, + parameters: Optional[Dict[str, Any]] = None, **kwargs ): super(LinkedServiceReference, self).__init__(**kwargs) @@ -23073,7 +23669,7 @@ class LogLocationSettings(msrest.serialization.Model): :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). - :type path: object + :type path: any """ _validation = { @@ -23089,7 +23685,7 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - path: Optional[object] = None, + path: Optional[Any] = None, **kwargs ): super(LogLocationSettings, self).__init__(**kwargs) @@ -23104,7 +23700,7 @@ class LogSettings(msrest.serialization.Model): :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). - :type enable_copy_activity_log: object + :type enable_copy_activity_log: any :param copy_activity_log_settings: Specifies settings for copy activity log. :type copy_activity_log_settings: ~azure.mgmt.datafactory.models.CopyActivityLogSettings :param log_location_settings: Required. Log location settings customer needs to provide when @@ -23126,7 +23722,7 @@ def __init__( self, *, log_location_settings: "LogLocationSettings", - enable_copy_activity_log: Optional[object] = None, + enable_copy_activity_log: Optional[Any] = None, copy_activity_log_settings: Optional["CopyActivityLogSettings"] = None, **kwargs ): @@ -23143,18 +23739,18 @@ class LogStorageSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param linked_service_name: Required. Log storage linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). 
- :type path: object + :type path: any :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). - :type log_level: object + :type log_level: any :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). - :type enable_reliable_logging: object + :type enable_reliable_logging: any """ _validation = { @@ -23173,10 +23769,10 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - path: Optional[object] = None, - log_level: Optional[object] = None, - enable_reliable_logging: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + path: Optional[Any] = None, + log_level: Optional[Any] = None, + enable_reliable_logging: Optional[Any] = None, **kwargs ): super(LogStorageSettings, self).__init__(**kwargs) @@ -23194,7 +23790,7 @@ class LookupActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -23215,7 +23811,7 @@ class LookupActivity(ExecutionActivity): :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param first_row_only: Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). - :type first_row_only: object + :type first_row_only: any """ _validation = { @@ -23245,13 +23841,13 @@ def __init__( name: str, source: "CopySource", dataset: "DatasetReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - first_row_only: Optional[object] = None, + first_row_only: Optional[Any] = None, **kwargs ): super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -23268,7 +23864,7 @@ class MagentoLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -23278,25 +23874,25 @@ class MagentoLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :type host: object + :type host: any :param access_token: The access token from Magento. 
:type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -23322,17 +23918,17 @@ class MagentoLinkedService(LinkedService): def __init__( self, *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, + host: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, access_token: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -23352,28 +23948,28 @@ class MagentoObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
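# --- Editorial usage sketch (not part of the generated patch) ---
# A usage sketch for the MagentoLinkedService defined above. SecureString is
# the concrete SecretBase this SDK provides for inline secrets; the host and
# token values are placeholders for illustration only.
from azure.mgmt.datafactory.models import MagentoLinkedService, SecureString

magento_ls = MagentoLinkedService(
    host="192.168.222.110/magento3",            # required; typed as Any after this change
    access_token=SecureString(value="<access-token>"),
    use_encrypted_endpoints=True,
)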
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -23398,14 +23994,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -23420,27 +24016,30 @@ class MagentoSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -23453,6 +24052,7 @@ class MagentoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -23461,16 +24061,17 @@ class MagentoSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MagentoSource' # type: str self.query = query @@ -23484,7 +24085,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -23520,7 +24121,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, managed_virtual_network: Optional["ManagedVirtualNetworkReference"] = None, compute_properties: Optional["IntegrationRuntimeComputeProperties"] = None, @@ -23542,7 +24143,7 @@ class ManagedIntegrationRuntimeError(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar time: The time when the error occurred. :vartype time: ~datetime.datetime :ivar code: Error code. 
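# --- Editorial usage sketch (not part of the generated patch) ---
# MagentoSource above gains disable_metrics_collection in this patch, the
# same flag the other CopySource/TabularSource hunks in this file add. A
# minimal sketch; the query text is an assumption for illustration.
from azure.mgmt.datafactory.models import MagentoSource

source = MagentoSource(
    query="SELECT * FROM sales_order",
    query_timeout="02:00:00",         # matches the documented ((\d+).)?(\d\d):... pattern
    disable_metrics_collection=True,  # new flag: opt out of data store metrics
)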
@@ -23571,7 +24172,7 @@ class ManagedIntegrationRuntimeError(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) @@ -23589,7 +24190,7 @@ class ManagedIntegrationRuntimeNode(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar node_id: The managed integration runtime node id. :vartype node_id: str :ivar status: The managed integration runtime node status. Possible values include: "Starting", @@ -23614,7 +24215,7 @@ class ManagedIntegrationRuntimeNode(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, errors: Optional[List["ManagedIntegrationRuntimeError"]] = None, **kwargs ): @@ -23632,7 +24233,7 @@ class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar type: The operation type. Could be start or stop. :vartype type: str :ivar start_time: The start time of the operation. @@ -23669,7 +24270,7 @@ class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) @@ -23691,7 +24292,7 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -23736,7 +24337,7 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) @@ -23754,7 +24355,7 @@ class ManagedPrivateEndpoint(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param connection_state: The managed private endpoint connection state. :type connection_state: ~azure.mgmt.datafactory.models.ConnectionStateProperties :param fqdns: Fully qualified domain names. 
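# --- Editorial usage sketch (not part of the generated patch) ---
# The Managed* hunks above only retype additional_properties to
# Dict[str, Any]; unmatched payload keys still round-trip through this bag.
# A sketch, with an invented "customSetting" key purely for illustration.
from azure.mgmt.datafactory.models import ManagedIntegrationRuntime

mir = ManagedIntegrationRuntime(
    description="Azure-SSIS IR",
    additional_properties={"customSetting": {"enabled": True}},  # preserved on (de)serialization
)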
@@ -23788,7 +24389,7 @@ class ManagedPrivateEndpoint(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connection_state: Optional["ConnectionStateProperties"] = None, fqdns: Optional[List[str]] = None, group_id: Optional[str] = None, @@ -23889,7 +24490,7 @@ class ManagedVirtualNetwork(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar v_net_id: Managed Virtual Network ID. :vartype v_net_id: str :ivar alias: Managed Virtual Network alias. @@ -23910,7 +24511,7 @@ class ManagedVirtualNetwork(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(ManagedVirtualNetwork, self).__init__(**kwargs) @@ -23958,7 +24559,7 @@ class ManagedVirtualNetworkReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Managed Virtual Network reference type. Default value: + :ivar type: Managed Virtual Network reference type. Has constant value: "ManagedVirtualNetworkReference". :vartype type: str :param reference_name: Required. Reference ManagedVirtualNetwork name. @@ -24035,12 +24636,14 @@ def __init__( class MappingDataFlow(DataFlow): """Mapping data flow. - :param type: Type of data flow.Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. :type type: str :param description: The description of the data flow. :type description: str :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder @@ -24054,6 +24657,10 @@ class MappingDataFlow(DataFlow): :type script: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, @@ -24069,7 +24676,7 @@ def __init__( self, *, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DataFlowFolder"] = None, sources: Optional[List["DataFlowSource"]] = None, sinks: Optional[List["DataFlowSink"]] = None, @@ -24092,7 +24699,7 @@ class MariaDBLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -24102,16 +24709,16 @@ class MariaDBLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. 
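# --- Editorial usage sketch (not part of the generated patch) ---
# MappingDataFlow above now lists the discriminator `type` as required in
# _validation; it is still constant-filled by the server, so callers just
# construct the subclass. A hedged sketch; the transformation names and the
# data flow script are placeholders, not content from this patch.
from azure.mgmt.datafactory.models import DataFlowSink, DataFlowSource, MappingDataFlow

data_flow = MappingDataFlow(
    sources=[DataFlowSource(name="src1")],
    sinks=[DataFlowSink(name="sink1")],
    script="source(output(id as integer)) ~> src1\nsrc1 sink() ~> sink1",
)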
Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -24133,14 +24740,14 @@ class MariaDBLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -24157,27 +24764,30 @@ class MariaDBSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
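# --- Editorial usage sketch (not part of the generated patch) ---
# MariaDBLinkedService above keeps the password out of connection_string via
# the `pwd` AzureKeyVaultSecretReference. A sketch; "AkvLS" and the secret
# name are assumed, not defined in this patch.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    MariaDBLinkedService,
)

mariadb_ls = MariaDBLinkedService(
    connection_string="Server=mariadb01;Port=3306;Database=sales;UID=loader;",
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="AkvLS"),
        secret_name="mariadb-password",
    ),
)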
- :type query: object + :type query: any """ _validation = { @@ -24190,6 +24800,7 @@ class MariaDBSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -24198,16 +24809,17 @@ class MariaDBSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MariaDBSource' # type: str self.query = query @@ -24219,28 +24831,28 @@ class MariaDBTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. 
Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -24265,14 +24877,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -24287,7 +24899,7 @@ class MarketoLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -24297,27 +24909,27 @@ class MarketoLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). - :type endpoint: object + :type endpoint: any :param client_id: Required. The client Id of your Marketo service. - :type client_id: object + :type client_id: any :param client_secret: The client secret of your Marketo service. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -24345,18 +24957,18 @@ class MarketoLinkedService(LinkedService): def __init__( self, *, - endpoint: object, - client_id: object, - additional_properties: Optional[Dict[str, object]] = None, + endpoint: Any, + client_id: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -24377,28 +24989,28 @@ class MarketoObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -24423,14 +25035,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -24445,27 +25057,30 @@ class MarketoSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -24478,6 +25093,7 @@ class MarketoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -24486,20 +25102,47 @@ class MarketoSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MarketoSource' # type: str self.query = query +class MetadataItem(msrest.serialization.Model): + """Specify the name and value of custom metadata item. + + :param name: Metadata item key name. Type: string (or Expression with resultType string). + :type name: any + :param value: Metadata item value. Type: string (or Expression with resultType string). + :type value: any + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + *, + name: Optional[Any] = None, + value: Optional[Any] = None, + **kwargs + ): + super(MetadataItem, self).__init__(**kwargs) + self.name = name + self.value = value + + class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. @@ -24507,7 +25150,7 @@ class MicrosoftAccessLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -24517,27 +25160,27 @@ class MicrosoftAccessLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
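# --- Editorial usage sketch (not part of the generated patch) ---
# MetadataItem is newly introduced by this patch: a name/value pair whose
# fields are expression-capable (hence Any). A minimal sketch with invented
# metadata keys.
from azure.mgmt.datafactory.models import MetadataItem

items = [
    MetadataItem(name="ingestedBy", value="adf-copy"),
    MetadataItem(name="runId", value={"value": "@pipeline().RunId", "type": "Expression"}),
]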
- :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param authentication_type: Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. + :type authentication_type: any + :param credential: The access credential portion of the connection string specified in + driver-specific property-value format. :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -24563,17 +25206,17 @@ class MicrosoftAccessLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, + annotations: Optional[List[Any]] = None, + authentication_type: Optional[Any] = None, credential: Optional["SecretBase"] = None, - user_name: Optional[object] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -24593,27 +25236,30 @@ class MicrosoftAccessSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -24628,22 +25274,24 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, **kwargs ): - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSink' # type: str self.pre_copy_script = pre_copy_script @@ -24655,20 +25303,23 @@ class MicrosoftAccessSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
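# --- Editorial usage sketch (not part of the generated patch) ---
# MicrosoftAccessSink above also picks up disable_metrics_collection and
# forwards it to the CopySink base. A sketch; the pre-copy script is an
# illustrative assumption.
from azure.mgmt.datafactory.models import MicrosoftAccessSink

access_sink = MicrosoftAccessSink(
    pre_copy_script="DELETE FROM StagingOrders",
    write_batch_size=1000,
    disable_metrics_collection=False,  # keep metrics collection on (the default)
)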
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -24684,6 +25335,7 @@ class MicrosoftAccessSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -24691,15 +25343,16 @@ class MicrosoftAccessSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSource' # type: str self.query = query self.additional_columns = additional_columns @@ -24712,29 +25365,29 @@ class MicrosoftAccessTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. 
Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -24759,14 +25412,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -24781,29 +25434,29 @@ class MongoDbAtlasCollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the MongoDB Atlas database. Type: string (or Expression with resultType string). 
- :type collection: object + :type collection: any """ _validation = { @@ -24829,13 +25482,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection: object, - additional_properties: Optional[Dict[str, object]] = None, + collection: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -24851,7 +25504,7 @@ class MongoDbAtlasLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -24861,14 +25514,14 @@ class MongoDbAtlasLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param database: Required. The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType string). - :type database: object + :type database: any """ _validation = { @@ -24891,13 +25544,13 @@ class MongoDbAtlasLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - database: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + database: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(MongoDbAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -24906,6 +25559,74 @@ def __init__( self.database = database +class MongoDbAtlasSink(CopySink): + """A copy activity MongoDB Atlas sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: any + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type write_batch_timeout: any + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: any + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: any + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any + :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) + rather than throw exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). + :type write_behavior: any + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + write_behavior: Optional[Any] = None, + **kwargs + ): + super(MongoDbAtlasSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) + self.type = 'MongoDbAtlasSink' # type: str + self.write_behavior = write_behavior + + class MongoDbAtlasSource(CopySource): """A copy activity source for a MongoDB Atlas database. @@ -24913,32 +25634,35 @@ class MongoDbAtlasSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
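# --- Editorial usage sketch (not part of the generated patch) ---
# MongoDbAtlasSink is a new model in this patch; write_behavior selects
# insert vs. upsert semantics. A hedged sketch; "upsert" mirrors the
# docstring's stated alternative to the "insert" default.
from azure.mgmt.datafactory.models import MongoDbAtlasSink

atlas_sink = MongoDbAtlasSink(
    write_behavior="upsert",         # overwrite documents with the same key
    write_batch_timeout="00:30:00",
    disable_metrics_collection=True,
)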
Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: object + :type filter: any :param cursor_methods: Cursor methods for Mongodb query. :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :type batch_size: object + :type batch_size: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -24954,6 +25678,7 @@ class MongoDbAtlasSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -24964,18 +25689,19 @@ class MongoDbAtlasSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - filter: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + filter: Optional[Any] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, - batch_size: Optional[object] = None, - query_timeout: Optional[object] = None, + batch_size: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, 
**kwargs) self.type = 'MongoDbAtlasSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -24991,29 +25717,29 @@ class MongoDbCollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection_name: Required. The table name of the MongoDB database. Type: string (or Expression with resultType string). - :type collection_name: object + :type collection_name: any """ _validation = { @@ -25039,13 +25765,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection_name: object, - additional_properties: Optional[Dict[str, object]] = None, + collection_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -25059,22 +25785,22 @@ class MongoDbCursorMethodsProperties(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param project: Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). - :type project: object + :type project: any :param sort: Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). - :type sort: object + :type sort: any :param skip: Specifies the how many documents skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). - :type skip: object + :type skip: any :param limit: Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. 
Type: integer (or Expression with resultType integer). - :type limit: object + :type limit: any """ _attribute_map = { @@ -25088,11 +25814,11 @@ class MongoDbCursorMethodsProperties(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - project: Optional[object] = None, - sort: Optional[object] = None, - skip: Optional[object] = None, - limit: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + project: Optional[Any] = None, + sort: Optional[Any] = None, + skip: Optional[Any] = None, + limit: Optional[Any] = None, **kwargs ): super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) @@ -25110,7 +25836,7 @@ class MongoDbLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -25120,37 +25846,37 @@ class MongoDbLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Required. The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: "Basic", "Anonymous". :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType :param database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). - :type database_name: object + :type database_name: any :param username: Username for authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_source: Database to verify the username and password. Type: string (or Expression with resultType string). - :type auth_source: object + :type auth_source: any :param port: The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object + :type enable_ssl: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -25181,21 +25907,21 @@ class MongoDbLinkedService(LinkedService): def __init__( self, *, - server: object, - database_name: object, - additional_properties: Optional[Dict[str, object]] = None, + server: Any, + database_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, authentication_type: Optional[Union[str, "MongoDbAuthenticationType"]] = None, - username: Optional[object] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - auth_source: Optional[object] = None, - port: Optional[object] = None, - enable_ssl: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + auth_source: Optional[Any] = None, + port: Optional[Any] = None, + enable_ssl: Optional[Any] = None, + allow_self_signed_server_cert: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -25219,21 +25945,24 @@ class MongoDbSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
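For reference, a minimal usage sketch for the MongoDbLinkedService defined above, assuming the track 2 model constructors shown in this diff; the server, database, and credential values are placeholders, and SecureString is the in-line SecretBase implementation from these same models (a Key Vault secret could be passed via AzureKeyVaultSecretReference instead):

    from azure.mgmt.datafactory.models import MongoDbLinkedService, SecureString

    # authentication_type accepts the MongoDbAuthenticationType values
    # ("Basic", "Anonymous") documented in the docstring above.
    mongo_ls = MongoDbLinkedService(
        server="mongo.example.com",
        database_name="reporting",
        authentication_type="Basic",
        username="etl_user",
        password=SecureString(value="<password>"),
        port=27017,
        enable_ssl=True,
    )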
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -25249,6 +25978,7 @@ class MongoDbSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -25256,15 +25986,16 @@ class MongoDbSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbSource' # type: str self.query = query self.additional_columns = additional_columns @@ -25277,29 +26008,29 @@ class MongoDbV2CollectionDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param collection: Required. The collection name of the MongoDB database. Type: string (or Expression with resultType string). 
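A matching source sketch for MongoDbSource above; per its docstring the query is a SQL-92 expression, and the new disable_metrics_collection flag defaults to false (values here are illustrative):

    from azure.mgmt.datafactory.models import MongoDbSource

    source = MongoDbSource(
        query="select * from orders",      # SQL-92 query expression
        disable_metrics_collection=False,  # keep data store metrics enabled
    )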
- :type collection: object + :type collection: any """ _validation = { @@ -25325,13 +26056,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection: object, - additional_properties: Optional[Dict[str, object]] = None, + collection: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -25347,7 +26078,7 @@ class MongoDbV2LinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -25357,13 +26088,13 @@ class MongoDbV2LinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param database: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). - :type database: object + :type database: any """ _validation = { @@ -25386,13 +26117,13 @@ class MongoDbV2LinkedService(LinkedService): def __init__( self, *, - connection_string: object, - database: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + database: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -25401,6 +26132,74 @@ def __init__( self.database = database +class MongoDbV2Sink(CopySink): + """A copy activity MongoDB sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, any] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: any + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: any + :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: any
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: any
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: any
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: any
+ :param write_behavior: Specifies whether the document with the same key should be overwritten
+ (upsert) rather than raising an exception (insert). The default value is "insert". Type:
+ string (or Expression with resultType string).
+ :type write_behavior: any
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ write_batch_size: Optional[Any] = None,
+ write_batch_timeout: Optional[Any] = None,
+ sink_retry_count: Optional[Any] = None,
+ sink_retry_wait: Optional[Any] = None,
+ max_concurrent_connections: Optional[Any] = None,
+ disable_metrics_collection: Optional[Any] = None,
+ write_behavior: Optional[Any] = None,
+ **kwargs
+ ):
+ super(MongoDbV2Sink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
+ self.type = 'MongoDbV2Sink' # type: str
+ self.write_behavior = write_behavior
+
+
 class MongoDbV2Source(CopySource):
 """A copy activity source for a MongoDB database.

@@ -25408,32 +26207,35 @@ class MongoDbV2Source(CopySource):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Copy source type.Constant filled by server.
 :type type: str
 :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
 integer).
- :type source_retry_count: object
+ :type source_retry_count: any
 :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
 string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
+ :type source_retry_wait: any
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
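For reference, a minimal sketch of the newly added sink (the same constructor shape applies to MongoDbAtlasSink above); write_behavior switches between insert and upsert semantics, and all values here are placeholders:

    from azure.mgmt.datafactory.models import MongoDbV2Sink

    sink = MongoDbV2Sink(
        write_batch_size=1000,
        write_batch_timeout="00:30:00",  # timespan pattern from the docstring
        write_behavior="upsert",         # default is "insert"
        disable_metrics_collection=False,
    )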
- :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). - :type filter: object + :type filter: any :param cursor_methods: Cursor methods for Mongodb query. :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). - :type batch_size: object + :type batch_size: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -25449,6 +26251,7 @@ class MongoDbV2Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -25459,18 +26262,19 @@ class MongoDbV2Source(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - filter: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + filter: Optional[Any] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, - batch_size: Optional[object] = None, - query_timeout: Optional[object] = None, + batch_size: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = filter 
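# For reference, a minimal construction sketch for this source, assuming the
# track 2 models shown in this diff; the filter/sort documents and the sizes
# are placeholder values:
#
#     from azure.mgmt.datafactory.models import (
#         MongoDbCursorMethodsProperties,
#         MongoDbV2Source,
#     )
#
#     source = MongoDbV2Source(
#         filter='{"status": "active"}',
#         cursor_methods=MongoDbCursorMethodsProperties(
#             project='{"_id": 0, "name": 1}',
#             sort='{"name": 1}',
#             limit=1000,
#         ),
#         batch_size=100,           # per-batch document count, not a data cap
#         query_timeout="02:00:00",
#     )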
self.cursor_methods = cursor_methods @@ -25486,7 +26290,7 @@ class MySqlLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -25496,15 +26300,15 @@ class MySqlLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -25527,14 +26331,14 @@ class MySqlLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -25551,26 +26355,29 @@ class MySqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -25583,6 +26390,7 @@ class MySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -25591,16 +26399,17 @@ class MySqlSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MySqlSource' # type: str self.query = query @@ -25612,28 +26421,28 @@ class MySqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The MySQL table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -25658,14 +26467,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -25680,7 +26489,7 @@ class NetezzaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -25690,16 +26499,16 @@ class NetezzaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
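A sketch of wiring the dataset above to its linked service, assuming LinkedServiceReference is constructed with a reference_name as this SDK's reference types conventionally are (both names are placeholders):

    from azure.mgmt.datafactory.models import LinkedServiceReference, MySqlTableDataset

    dataset = MySqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name="MySqlLinkedService"),
        table_name="sales",
    )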
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -25721,14 +26530,14 @@ class NetezzaLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -25743,15 +26552,15 @@ class NetezzaPartitionSettings(msrest.serialization.Model): :param partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: object + :type partition_lower_bound: any """ _attribute_map = { @@ -25763,9 +26572,9 @@ class NetezzaPartitionSettings(msrest.serialization.Model): def __init__( self, *, - partition_column_name: Optional[object] = None, - partition_upper_bound: Optional[object] = None, - partition_lower_bound: Optional[object] = None, + partition_column_name: Optional[Any] = None, + partition_upper_bound: Optional[Any] = None, + partition_lower_bound: Optional[Any] = None, **kwargs ): super(NetezzaPartitionSettings, self).__init__(**kwargs) @@ -25781,30 +26590,33 @@ class NetezzaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
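The partition settings model above pairs with NetezzaSource, defined next; a sketch of a dynamic-range parallel read, with the bounds passed as strings per the docstrings (column and table names are placeholders):

    from azure.mgmt.datafactory.models import NetezzaPartitionSettings, NetezzaSource

    source = NetezzaSource(
        query="SELECT * FROM sales",
        partition_option="DynamicRange",  # "None", "DataSlice", or "DynamicRange"
        partition_settings=NetezzaPartitionSettings(
            partition_column_name="order_id",
            partition_lower_bound="1",
            partition_upper_bound="1000000",
        ),
    )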
+ :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param partition_option: The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Netezza source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ @@ -25819,6 +26631,7 @@ class NetezzaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -25829,18 +26642,19 @@ class NetezzaSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, - partition_option: Optional[object] = None, + query: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["NetezzaPartitionSettings"] = None, **kwargs ): - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'NetezzaSource' # type: str self.query = query self.partition_option = partition_option @@ -25854,35 +26668,35 @@ class NetezzaTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. 
Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Netezza. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -25909,16 +26723,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -25935,7 +26749,7 @@ class ODataLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -25945,35 +26759,35 @@ class ODataLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. 
The URL of the OData service endpoint. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Type of authentication used to connect to the OData service. Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", "ManagedServiceIdentity". :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password of the OData service. :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_headers: The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - :type auth_headers: object + :type auth_headers: any :param tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). - :type tenant: object + :type tenant: any :param service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). - :type azure_cloud_type: object + :type azure_cloud_type: any :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). - :type aad_resource_id: object + :type aad_resource_id: any :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or @@ -25992,7 +26806,7 @@ class ODataLinkedService(LinkedService): :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -26026,25 +26840,25 @@ class ODataLinkedService(LinkedService): def __init__( self, *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, + url: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, authentication_type: Optional[Union[str, "ODataAuthenticationType"]] = None, - user_name: Optional[object] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - auth_headers: Optional[object] = None, - tenant: Optional[object] = None, - service_principal_id: Optional[object] = None, - azure_cloud_type: Optional[object] = None, - aad_resource_id: Optional[object] = None, + auth_headers: Optional[Any] = None, + tenant: Optional[Any] = None, + service_principal_id: Optional[Any] = None, + azure_cloud_type: Optional[Any] = None, + aad_resource_id: Optional[Any] = None, aad_service_principal_credential_type: Optional[Union[str, "ODataAadServicePrincipalCredentialType"]] = None, service_principal_key: Optional["SecretBase"] = None, service_principal_embedded_cert: Optional["SecretBase"] = None, service_principal_embedded_cert_password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -26072,28 +26886,28 @@ class ODataResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: The OData resource path. Type: string (or Expression with resultType string). 
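A sketch of the service-principal path through the linked service above; the IDs and secret are placeholders, and SecureString stands in for any SecretBase (a certificate flow would use service_principal_embedded_cert instead):

    from azure.mgmt.datafactory.models import ODataLinkedService, SecureString

    odata_ls = ODataLinkedService(
        url="https://services.odata.org/V4/Northwind/Northwind.svc",
        authentication_type="AadServicePrincipal",
        service_principal_id="<application-id>",
        tenant="<tenant-id>",
        aad_service_principal_credential_type="ServicePrincipalKey",
        service_principal_key=SecureString(value="<client-secret>"),
    )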
- :type path: object + :type path: any """ _validation = { @@ -26118,14 +26932,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - path: Optional[object] = None, + path: Optional[Any] = None, **kwargs ): super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -26140,26 +26954,29 @@ class ODataSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: object + :type query: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -26175,6 +26992,7 @@ class ODataSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -26183,16 +27001,17 @@ class ODataSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, - http_request_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, + http_request_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ODataSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -26206,7 +27025,7 @@ class OdbcLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -26216,26 +27035,26 @@ class OdbcLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param authentication_type: Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. 
+ :type authentication_type: any + :param credential: The access credential portion of the connection string specified in + driver-specific property-value format. :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -26261,17 +27080,17 @@ class OdbcLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, + annotations: Optional[List[Any]] = None, + authentication_type: Optional[Any] = None, credential: Optional["SecretBase"] = None, - user_name: Optional[object] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -26291,27 +27110,30 @@ class OdbcSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
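For reference, a minimal construction of the ODBC linked service above; per its docstring the connection string carries only the non-credential portion, while the secret travels separately (all values are placeholders):

    from azure.mgmt.datafactory.models import OdbcLinkedService, SecureString

    odbc_ls = OdbcLinkedService(
        connection_string="Driver={SQL Server};Server=myserver;Database=mydb;",
        authentication_type="Basic",
        user_name="etl_user",
        password=SecureString(value="<password>"),
    )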
- :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -26326,22 +27148,24 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, **kwargs ): - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OdbcSink' # type: str self.pre_copy_script = pre_copy_script @@ -26353,26 +27177,29 @@ class OdbcSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -26385,6 +27212,7 @@ class OdbcSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -26393,16 +27221,17 @@ class OdbcSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OdbcSource' # type: str self.query = query @@ -26414,28 +27243,28 @@ class OdbcTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The ODBC table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -26460,14 +27289,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -26482,32 +27311,32 @@ class Office365Dataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any :param predicate: A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). 
- :type predicate: object + :type predicate: any """ _validation = { @@ -26534,15 +27363,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - table_name: object, - additional_properties: Optional[Dict[str, object]] = None, + table_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - predicate: Optional[object] = None, + predicate: Optional[Any] = None, **kwargs ): super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -26558,7 +27387,7 @@ class Office365LinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -26568,22 +27397,22 @@ class Office365LinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). - :type office365_tenant_id: object + :type office365_tenant_id: any :param service_principal_tenant_id: Required. Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). - :type service_principal_tenant_id: object + :type service_principal_tenant_id: any :param service_principal_id: Required. Specify the application's client ID. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: Required. Specify the application's key. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -26611,16 +27440,16 @@ class Office365LinkedService(LinkedService): def __init__( self, *, - office365_tenant_id: object, - service_principal_tenant_id: object, - service_principal_id: object, + office365_tenant_id: Any, + service_principal_tenant_id: Any, + service_principal_id: Any, service_principal_key: "SecretBase", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - encrypted_credential: Optional[object] = None, + annotations: Optional[List[Any]] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -26639,37 +27468,40 @@ class Office365Source(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). - :type allowed_groups: object + :type allowed_groups: any :param user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType string). - :type user_scope_filter_uri: object + :type user_scope_filter_uri: any :param date_filter_column: The Column to apply the :code:`<paramref name="StartTime"/>` and :code:`<paramref name="EndTime"/>`. Type: string (or Expression with resultType string). - :type date_filter_column: object + :type date_filter_column: any :param start_time: Start time of the requested range for this dataset. Type: string (or Expression with resultType string). - :type start_time: object + :type start_time: any :param end_time: End time of the requested range for this dataset. Type: string (or Expression with resultType string). - :type end_time: object + :type end_time: any :param output_columns: The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ].
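To show the new disable_metrics_collection knob being threaded through a source, a minimal sketch of the regenerated Office365Source; the group name and time range are placeholders:

    from azure.mgmt.datafactory.models import Office365Source

    office365_source = Office365Source(
        allowed_groups=["group-a"],       # array of strings, or an ADF expression
        start_time="2021-01-01T00:00:00Z",
        end_time="2021-07-01T00:00:00Z",
        disable_metrics_collection=True,  # new in this regeneration
    )
    assert office365_source.type == 'Office365Source'  # constant filled by the model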
- :type output_columns: object + :type output_columns: any """ _validation = { @@ -26682,6 +27514,7 @@ class Office365Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, @@ -26693,19 +27526,20 @@ class Office365Source(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - allowed_groups: Optional[object] = None, - user_scope_filter_uri: Optional[object] = None, - date_filter_column: Optional[object] = None, - start_time: Optional[object] = None, - end_time: Optional[object] = None, - output_columns: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + allowed_groups: Optional[Any] = None, + user_scope_filter_uri: Optional[Any] = None, + date_filter_column: Optional[Any] = None, + start_time: Optional[Any] = None, + end_time: Optional[Any] = None, + output_columns: Optional[Any] = None, **kwargs ): - super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'Office365Source' # type: str self.allowed_groups = allowed_groups self.user_scope_filter_uri = user_scope_filter_uri @@ -27000,7 +27834,7 @@ class OracleCloudStorageLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -27010,10 +27844,10 @@ class OracleCloudStorageLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param access_key_id: The access key identifier of the Oracle Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object + :type access_key_id: any :param secret_access_key: The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. 
:type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase @@ -27021,11 +27855,11 @@ class OracleCloudStorageLinkedService(LinkedService): Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). - :type service_url: object + :type service_url: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -27048,15 +27882,15 @@ class OracleCloudStorageLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - access_key_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + access_key_id: Optional[Any] = None, secret_access_key: Optional["SecretBase"] = None, - service_url: Optional[object] = None, - encrypted_credential: Optional[object] = None, + service_url: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(OracleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -27074,21 +27908,21 @@ class OracleCloudStorageLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any :param bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: string (or Expression with resultType string). - :type bucket_name: object + :type bucket_name: any :param version: Specify the version of Oracle Cloud Storage. Type: string (or Expression with resultType string). 
- :type version: object + :type version: any """ _validation = { @@ -27107,11 +27941,11 @@ class OracleCloudStorageLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - bucket_name: Optional[object] = None, - version: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, + bucket_name: Optional[Any] = None, + version: Optional[Any] = None, **kwargs ): super(OracleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -27127,42 +27961,45 @@ class OracleCloudStorageReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param prefix: The prefix filter for the Oracle Cloud Storage object name. Type: string (or Expression with resultType string). - :type prefix: object + :type prefix: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - :type partition_root_path: object + :type partition_root_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
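A sketch of the regenerated OracleCloudStorageReadSettings; every argument is optional, and the wildcard is a placeholder:

    from azure.mgmt.datafactory.models import OracleCloudStorageReadSettings

    read_settings = OracleCloudStorageReadSettings(
        recursive=True,
        wildcard_file_name="*.csv",
        delete_files_after_completion=False,
        disable_metrics_collection=True,  # new in this regeneration
    )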
- :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -27173,6 +28010,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -27188,21 +28026,22 @@ class OracleCloudStorageReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - prefix: Optional[object] = None, - file_list_path: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, + prefix: Optional[Any] = None, + file_list_path: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, **kwargs ): - super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleCloudStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -27223,7 +28062,7 @@ class OracleLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -27233,16 +28072,16 @@ class OracleLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. 
:type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -27265,14 +28104,14 @@ class OracleLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -27286,18 +28125,18 @@ class OraclePartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Oracle source partitioning. :param partition_names: Names of the physical partitions of Oracle table. - :type partition_names: object + :type partition_names: any :param partition_column_name: The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: object + :type partition_lower_bound: any """ _attribute_map = { @@ -27310,10 +28149,10 @@ class OraclePartitionSettings(msrest.serialization.Model): def __init__( self, *, - partition_names: Optional[object] = None, - partition_column_name: Optional[object] = None, - partition_upper_bound: Optional[object] = None, - partition_lower_bound: Optional[object] = None, + partition_names: Optional[Any] = None, + partition_column_name: Optional[Any] = None, + partition_upper_bound: Optional[Any] = None, + partition_lower_bound: Optional[Any] = None, **kwargs ): super(OraclePartitionSettings, self).__init__(**kwargs) @@ -27330,7 +28169,7 @@ class OracleServiceCloudLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -27340,29 +28179,29 @@ class OracleServiceCloudLinkedService(LinkedService): :param parameters: Parameters for linked service. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: object + :type host: any :param username: Required. The user name that you use to access Oracle Service Cloud server. - :type username: object + :type username: any :param password: Required. The password corresponding to the user name that you provided in the username key. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -27391,18 +28230,18 @@ class OracleServiceCloudLinkedService(LinkedService): def __init__( self, *, - host: object, - username: object, + host: Any, + username: Any, password: "SecretBase", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + annotations: Optional[List[Any]] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -27423,28 +28262,28 @@ class OracleServiceCloudObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
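A sketch of the regenerated OracleServiceCloudLinkedService; host, username, and password are required, and the values shown are placeholders:

    from azure.mgmt.datafactory.models import (
        OracleServiceCloudLinkedService,
        SecureString,
    )

    osc_linked_service = OracleServiceCloudLinkedService(
        host="https://example.custhelp.com",  # placeholder instance URL
        username="integration_user",
        password=SecureString(value="<password>"),
        use_encrypted_endpoints=True,
    )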
- :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -27469,14 +28308,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -27491,27 +28330,30 @@ class OracleServiceCloudSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -27524,6 +28366,7 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -27532,16 +28375,17 @@ class OracleServiceCloudSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OracleServiceCloudSource' # type: str self.query = query @@ -27553,27 +28397,30 @@ class OracleSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. 
Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any """ _validation = { @@ -27588,22 +28435,24 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, **kwargs ): - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSink' # type: str self.pre_copy_script = pre_copy_script @@ -27615,27 +28464,30 @@ class OracleSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
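A sketch pairing OraclePartitionSettings with the regenerated OracleSource; the column name and bounds are placeholders, and since partition_option is now Any-typed, the literal "DynamicRange" stands in for what could also be an expression:

    from azure.mgmt.datafactory.models import OraclePartitionSettings, OracleSource

    oracle_source = OracleSource(
        partition_option="DynamicRange",
        partition_settings=OraclePartitionSettings(
            partition_column_name="ID",
            partition_lower_bound="1",
            partition_upper_bound="1000000",
        ),
        disable_metrics_collection=False,  # new parameter; False matches the default
    )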
+ :type disable_metrics_collection: any :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). - :type oracle_reader_query: object + :type oracle_reader_query: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -27653,6 +28505,7 @@ class OracleSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, @@ -27663,18 +28516,19 @@ class OracleSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - oracle_reader_query: Optional[object] = None, - query_timeout: Optional[object] = None, - partition_option: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + oracle_reader_query: Optional[Any] = None, + query_timeout: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["OraclePartitionSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSource' # type: str self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout @@ -27690,35 +28544,35 @@ class OracleTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -27745,16 +28599,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + table_name: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -27771,30 +28625,31 @@ class OrcDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". - :type orc_compression_codec: str or ~azure.mgmt.datafactory.models.OrcCompressionCodec + :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + resultType string). + :type orc_compression_codec: any """ _validation = { @@ -27813,22 +28668,22 @@ class OrcDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - orc_compression_codec: Optional[Union[str, "OrcCompressionCodec"]] = None, + orc_compression_codec: Optional[Any] = None, **kwargs ): super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -27844,13 +28699,13 @@ class OrcFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
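Note that orc_compression_codec moves from the OrcCompressionCodec enum to an Any-typed property serialized as 'object', so a literal codec name and an ADF expression should both pass; a sketch with a placeholder linked service name:

    from azure.mgmt.datafactory.models import LinkedServiceReference, OrcDataset

    orc_dataset = OrcDataset(
        linked_service_name=LinkedServiceReference(reference_name="AzureBlobStorage1"),
        orc_compression_codec="snappy",  # was Union[str, OrcCompressionCodec]
        # orc_compression_codec={"value": "@dataset().codec", "type": "Expression"},
    )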
- :type deserializer: object + :type deserializer: any """ _validation = { @@ -27867,9 +28722,9 @@ class OrcFormat(DatasetStorageFormat): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - serializer: Optional[object] = None, - deserializer: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + serializer: Optional[Any] = None, + deserializer: Optional[Any] = None, **kwargs ): super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) @@ -27883,24 +28738,27 @@ class OrcSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: ORC format settings. 
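A sketch of the regenerated OrcSink wired to OrcWriteSettings (regenerated further below); the row limit and file prefix are placeholders:

    from azure.mgmt.datafactory.models import OrcSink, OrcWriteSettings

    orc_sink = OrcSink(
        format_settings=OrcWriteSettings(
            max_rows_per_file=1000000,
            file_name_prefix="part",
        ),
        disable_metrics_collection=True,  # new in this regeneration
    )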
@@ -27919,6 +28777,7 @@ class OrcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } @@ -27926,17 +28785,18 @@ class OrcSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): - super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -27949,18 +28809,21 @@ class OrcSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -27978,6 +28841,7 @@ class OrcSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -27985,15 +28849,16 @@ class OrcSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -28006,16 +28871,16 @@ class OrcWriteSettings(FormatWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object + :type max_rows_per_file: any :param file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). - :type file_name_prefix: object + :type file_name_prefix: any """ _validation = { @@ -28032,9 +28897,9 @@ class OrcWriteSettings(FormatWriteSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_rows_per_file: Optional[object] = None, - file_name_prefix: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_rows_per_file: Optional[Any] = None, + file_name_prefix: Optional[Any] = None, **kwargs ): super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -28085,7 +28950,7 @@ class ParameterSpecification(msrest.serialization.Model): "Float", "Bool", "Array", "SecureString". :type type: str or ~azure.mgmt.datafactory.models.ParameterType :param default_value: Default value of parameter. 
- :type default_value: object + :type default_value: any """ _validation = { @@ -28101,7 +28966,7 @@ def __init__( self, *, type: Union[str, "ParameterType"], - default_value: Optional[object] = None, + default_value: Optional[Any] = None, **kwargs ): super(ParameterSpecification, self).__init__(**kwargs) @@ -28116,31 +28981,31 @@ class ParquetDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param location: The location of the parquet storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~azure.mgmt.datafactory.models.CompressionCodec + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). 
+ :type compression_codec: any """ _validation = { @@ -28159,22 +29024,22 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - compression_codec: Optional[Union[str, "CompressionCodec"]] = None, + compression_codec: Optional[Any] = None, **kwargs ): super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -28190,13 +29055,13 @@ class ParquetFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any """ _validation = { @@ -28213,9 +29078,9 @@ class ParquetFormat(DatasetStorageFormat): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - serializer: Optional[object] = None, - deserializer: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + serializer: Optional[Any] = None, + deserializer: Optional[Any] = None, **kwargs ): super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) @@ -28229,24 +29094,27 @@ class ParquetSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). 
- :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Parquet format settings. @@ -28265,6 +29133,7 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } @@ -28272,17 +29141,18 @@ class ParquetSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -28295,18 +29165,21 @@ class ParquetSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -28324,6 +29197,7 @@ class ParquetSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -28331,15 +29205,16 @@ class ParquetSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -28352,16 +29227,16 @@ class ParquetWriteSettings(FormatWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). - :type max_rows_per_file: object + :type max_rows_per_file: any :param file_name_prefix: Specifies the file name pattern :code:``_:code:``.:code:`` when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). 
- :type file_name_prefix: object + :type file_name_prefix: any """ _validation = { @@ -28378,9 +29253,9 @@ class ParquetWriteSettings(FormatWriteSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_rows_per_file: Optional[object] = None, - file_name_prefix: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_rows_per_file: Optional[Any] = None, + file_name_prefix: Optional[Any] = None, **kwargs ): super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -28396,7 +29271,7 @@ class PaypalLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -28406,27 +29281,27 @@ class PaypalLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). - :type host: object + :type host: any :param client_id: Required. The client ID associated with your PayPal application. - :type client_id: object + :type client_id: any :param client_secret: The client secret associated with your PayPal application. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -28454,18 +29329,18 @@ class PaypalLinkedService(LinkedService): def __init__( self, *, - host: object, - client_id: object, - additional_properties: Optional[Dict[str, object]] = None, + host: Any, + client_id: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -28486,28 +29361,28 @@ class PaypalObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -28532,14 +29407,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -28554,27 +29429,30 @@ class PaypalSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -28587,6 +29465,7 @@ class PaypalSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28595,16 +29474,17 @@ class PaypalSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PaypalSource' # type: str self.query = query @@ -28616,7 +29496,7 @@ class PhoenixLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -28626,45 +29506,45 @@ class PhoenixLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The IP address or host name of the Phoenix server. (i.e. 192.168.222.160). - :type host: object + :type host: any :param port: The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. - :type port: object + :type port: any :param http_path: The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. - :type http_path: object + :type http_path: any :param authentication_type: Required. The authentication mechanism used to connect to the Phoenix server. 
Possible values include: "Anonymous", "UsernameAndPassword", "WindowsAzureHDInsightService". :type authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType :param username: The user name used to connect to the Phoenix server. - :type username: object + :type username: any :param password: The password corresponding to the user name. :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -28697,23 +29577,23 @@ class PhoenixLinkedService(LinkedService): def __init__( self, *, - host: object, + host: Any, authentication_type: Union[str, "PhoenixAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - http_path: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + port: Optional[Any] = None, + http_path: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + enable_ssl: Optional[Any] = None, + trusted_cert_path: Optional[Any] = None, + use_system_trust_store: Optional[Any] = None, + allow_host_name_cn_mismatch: Optional[Any] = None, + allow_self_signed_server_cert: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -28739,35 +29619,35 @@ class PhoenixObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Phoenix. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or Expression with resultType string). 
- :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -28794,16 +29674,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -28820,27 +29700,30 @@ class PhoenixSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -28853,6 +29736,7 @@ class PhoenixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28861,16 +29745,17 @@ class PhoenixSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PhoenixSource' # type: str self.query = query @@ -28879,7 +29764,7 @@ class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): """Pipeline ElapsedTime Metric Policy. :param duration: TimeSpan value, after which an Azure Monitoring Metric is fired. - :type duration: object + :type duration: any """ _attribute_map = { @@ -28889,7 +29774,7 @@ class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): def __init__( self, *, - duration: Optional[object] = None, + duration: Optional[Any] = None, **kwargs ): super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) @@ -28977,7 +29862,7 @@ class PipelineReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Pipeline reference type. Default value: "PipelineReference". + :ivar type: Pipeline reference type. Has constant value: "PipelineReference". :vartype type: str :param reference_name: Required. Reference pipeline name. :type reference_name: str @@ -29025,7 +29910,7 @@ class PipelineResource(SubResource): :vartype etag: str :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param description: The description of the pipeline. :type description: str :param activities: List of activities in pipeline. 
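# --- Illustrative usage (not part of the generated patch) --------------------
# A minimal sketch of the pipeline-model changes above: `annotations` and
# `run_dimensions` are plain `Any` containers, and PipelineRunInvokedBy
# exposes the new read-only `pipeline_name` / `pipeline_run_id` fields that
# the service fills in when one pipeline triggers another. The names and
# values below are placeholders, not values taken from this patch.
from azure.mgmt.datafactory.models import PipelineResource

pipeline = PipelineResource(
    description="Copy staged ORC files into the warehouse.",
    annotations=["nightly", {"owner": "data-platform"}],
    run_dimensions={"businessUnit": "finance"},
    concurrency=1,
)

# After a run, the invoking pipeline (if any) is reported read-only on the
# PipelineRun returned by the pipeline_runs operations, e.g.:
#   run = client.pipeline_runs.get(resource_group, factory_name, run_id)
#   run.invoked_by.pipeline_name    # name of the parent pipeline, if any
#   run.invoked_by.pipeline_run_id  # run id of that parent pipeline run
# ------------------------------------------------------------------------------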
@@ -29037,9 +29922,9 @@ class PipelineResource(SubResource): :param concurrency: The max number of concurrent runs for the pipeline. :type concurrency: int :param annotations: List of tags that can be used for describing the Pipeline. - :type annotations: list[object] + :type annotations: list[any] :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, object] + :type run_dimensions: dict[str, any] :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.PipelineFolder @@ -29075,14 +29960,14 @@ class PipelineResource(SubResource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, activities: Optional[List["Activity"]] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, variables: Optional[Dict[str, "VariableSpecification"]] = None, concurrency: Optional[int] = None, - annotations: Optional[List[object]] = None, - run_dimensions: Optional[Dict[str, object]] = None, + annotations: Optional[List[Any]] = None, + run_dimensions: Optional[Dict[str, Any]] = None, folder: Optional["PipelineFolder"] = None, policy: Optional["PipelinePolicy"] = None, **kwargs @@ -29107,7 +29992,7 @@ class PipelineRun(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar run_id: Identifier of a run. :vartype run_id: str :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. @@ -29173,7 +30058,7 @@ class PipelineRun(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(PipelineRun, self).__init__(**kwargs) @@ -29204,18 +30089,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model): :vartype id: str :ivar invoked_by_type: The type of the entity that started the run. :vartype invoked_by_type: str + :ivar pipeline_name: The name of the pipeline that triggered the run, if any. + :vartype pipeline_name: str + :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any. + :vartype pipeline_run_id: str """ _validation = { 'name': {'readonly': True}, 'id': {'readonly': True}, 'invoked_by_type': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, } def __init__( @@ -29226,6 +30119,8 @@ def __init__( self.name = None self.id = None self.invoked_by_type = None + self.pipeline_name = None + self.pipeline_run_id = None class PipelineRunsQueryResponse(msrest.serialization.Model): @@ -29266,20 +30161,20 @@ class PolybaseSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param reject_type: Reject type. Possible values include: "value", "percentage". 
:type reject_type: str or ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType :param reject_value: Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. - :type reject_value: object + :type reject_value: any :param reject_sample_value: Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. - :type reject_sample_value: object + :type reject_sample_value: any :param use_type_default: Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). - :type use_type_default: object + :type use_type_default: any """ _attribute_map = { @@ -29293,11 +30188,11 @@ class PolybaseSettings(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, reject_type: Optional[Union[str, "PolybaseSettingsRejectType"]] = None, - reject_value: Optional[object] = None, - reject_sample_value: Optional[object] = None, - use_type_default: Optional[object] = None, + reject_value: Optional[Any] = None, + reject_sample_value: Optional[Any] = None, + use_type_default: Optional[Any] = None, **kwargs ): super(PolybaseSettings, self).__init__(**kwargs) @@ -29315,7 +30210,7 @@ class PostgreSqlLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -29325,15 +30220,15 @@ class PostgreSqlLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -29356,14 +30251,14 @@ class PostgreSqlLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -29380,26 +30275,29 @@ class PostgreSqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -29412,6 +30310,7 @@ class PostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -29420,16 +30319,17 @@ class PostgreSqlSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PostgreSqlSource' # type: str self.query = query @@ -29441,34 +30341,34 @@ class PostgreSqlTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. 
Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The PostgreSQL table name. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -29495,16 +30395,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -29521,7 +30421,7 @@ class PrestoLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -29531,47 +30431,47 @@ class PrestoLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The IP address or host name of the Presto server. (i.e. 192.168.222.160). - :type host: object + :type host: any :param server_version: Required. The version of the Presto server. (i.e. 0.148-t). - :type server_version: object + :type server_version: any :param catalog: Required. The catalog context for all request against the server. - :type catalog: object + :type catalog: any :param port: The TCP port that the Presto server uses to listen for client connections. The default value is 8080. - :type port: object + :type port: any :param authentication_type: Required. The authentication mechanism used to connect to the Presto server. Possible values include: "Anonymous", "LDAP". :type authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType :param username: The user name used to connect to the Presto server. - :type username: object + :type username: any :param password: The password corresponding to the user name. :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
- :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param time_zone_id: The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. - :type time_zone_id: object + :type time_zone_id: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -29608,25 +30508,25 @@ class PrestoLinkedService(LinkedService): def __init__( self, *, - host: object, - server_version: object, - catalog: object, + host: Any, + server_version: Any, + catalog: Any, authentication_type: Union[str, "PrestoAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + port: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - time_zone_id: Optional[object] = None, - encrypted_credential: Optional[object] = None, + enable_ssl: Optional[Any] = None, + trusted_cert_path: Optional[Any] = None, + use_system_trust_store: Optional[Any] = None, + allow_host_name_cn_mismatch: Optional[Any] = None, + allow_self_signed_server_cert: Optional[Any] = None, + time_zone_id: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -29654,35 +30554,35 @@ class PrestoObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Presto. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Presto. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -29709,16 +30609,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -29735,27 +30635,30 @@ class PrestoSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object
+ :type source_retry_wait: any
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
- :type max_concurrent_connections: object
+ :type max_concurrent_connections: any
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: any
 :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
 pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type query_timeout: object
+ :type query_timeout: any
 :param additional_columns: Specifies the additional columns to be added to source data. Type:
 array of objects (or Expression with resultType array of objects).
 :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns]
 :param query: A query to retrieve data from source. Type: string (or Expression with
 resultType string).
- :type query: object
+ :type query: any
 """

 _validation = {
@@ -29768,6 +30671,7 @@ class PrestoSource(TabularSource):
 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
 'query': {'key': 'query', 'type': 'object'},
@@ -29776,20 +30680,294 @@ def __init__(
 self,
 *,
- additional_properties: Optional[Dict[str, object]] = None,
- source_retry_count: Optional[object] = None,
- source_retry_wait: Optional[object] = None,
- max_concurrent_connections: Optional[object] = None,
- query_timeout: Optional[object] = None,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ source_retry_count: Optional[Any] = None,
+ source_retry_wait: Optional[Any] = None,
+ max_concurrent_connections: Optional[Any] = None,
+ disable_metrics_collection: Optional[Any] = None,
+ query_timeout: Optional[Any] = None,
 additional_columns: Optional[List["AdditionalColumns"]] = None,
- query: Optional[object] = None,
+ query: Optional[Any] = None,
 **kwargs
 ):
- super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
 self.type = 'PrestoSource' # type: str
 self.query = query


+class PrivateEndpointConnectionListResponse(msrest.serialization.Model):
+ """A list of private endpoint connection resources.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param value: Required. List of Private Endpoint Connections.
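A minimal usage sketch, not part of the generated patch: the new disable_metrics_collection field accepts a literal boolean or an ADF expression, like the other "any"-typed knobs on these sources. The query and timeout values below are illustrative.

    from azure.mgmt.datafactory.models import PrestoSource

    # Hypothetical copy-activity source reading from Presto with per-activity
    # data store metrics collection turned off.
    source = PrestoSource(
        query="SELECT * FROM tpch.sf1.nation",
        query_timeout="02:00:00",         # matches the TimeSpan pattern in the docstring
        max_concurrent_connections=4,
        disable_metrics_collection=True,  # new in this patch; the service default is false
    )
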
+ :type value: list[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnectionResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["PrivateEndpointConnectionResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(PrivateEndpointConnectionListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class PrivateEndpointConnectionResource(SubResource): + """Private Endpoint Connection ARM resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. + :type properties: ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RemotePrivateEndpointConnection'}, + } + + def __init__( + self, + *, + properties: Optional["RemotePrivateEndpointConnection"] = None, + **kwargs + ): + super(PrivateEndpointConnectionResource, self).__init__(**kwargs) + self.properties = properties + + +class PrivateLinkConnectionApprovalRequest(msrest.serialization.Model): + """A request to approve or reject a private endpoint connection. + + :param private_link_service_connection_state: The state of a private link connection. + :type private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ + + _attribute_map = { + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__( + self, + *, + private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None, + **kwargs + ): + super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) + self.private_link_service_connection_state = private_link_service_connection_state + + +class PrivateLinkConnectionApprovalRequestResource(SubResource): + """Private Endpoint Connection Approval ARM resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. 
+ :type properties: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkConnectionApprovalRequest'}, + } + + def __init__( + self, + *, + properties: Optional["PrivateLinkConnectionApprovalRequest"] = None, + **kwargs + ): + super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) + self.properties = properties + + +class PrivateLinkConnectionState(msrest.serialization.Model): + """The state of a private link connection. + + :param status: Status of a private link connection. + :type status: str + :param description: Description of a private link connection. + :type description: str + :param actions_required: ActionsRequired for a private link connection. + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Optional[str] = None, + description: Optional[str] = None, + actions_required: Optional[str] = None, + **kwargs + ): + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + self.actions_required = actions_required + + +class PrivateLinkResource(SubResource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties. + :type properties: ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkResourceProperties'}, + } + + def __init__( + self, + *, + properties: Optional["PrivateLinkResourceProperties"] = None, + **kwargs + ): + super(PrivateLinkResource, self).__init__(**kwargs) + self.properties = properties + + +class PrivateLinkResourceProperties(msrest.serialization.Model): + """Properties of a private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar group_id: GroupId of a private link resource. + :vartype group_id: str + :ivar required_members: RequiredMembers of a private link resource. + :vartype required_members: list[str] + :ivar required_zone_names: RequiredZoneNames of a private link resource. 
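A hedged sketch of how the new approval models compose (illustrative only; per the operations files added in this PR, the payload would presumably be handed to the new private endpoint connection operations group):

    from azure.mgmt.datafactory.models import (
        PrivateLinkConnectionApprovalRequest,
        PrivateLinkConnectionApprovalRequestResource,
        PrivateLinkConnectionState,
    )

    # Build an approval payload for a pending private endpoint connection.
    approval = PrivateLinkConnectionApprovalRequestResource(
        properties=PrivateLinkConnectionApprovalRequest(
            private_link_service_connection_state=PrivateLinkConnectionState(
                status="Approved",  # or "Rejected"
                description="Approved by the data platform team.",
                actions_required="",
            )
        )
    )
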
+ :vartype required_zone_names: list[str] + """ + + _validation = { + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + 'required_zone_names': {'readonly': True}, + } + + _attribute_map = { + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResourceProperties, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = None + + +class PrivateLinkResourcesWrapper(msrest.serialization.Model): + """Wrapper for a collection of private link resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. + :type value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__( + self, + *, + value: List["PrivateLinkResource"], + **kwargs + ): + super(PrivateLinkResourcesWrapper, self).__init__(**kwargs) + self.value = value + + class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): """A list of active debug sessions. @@ -29823,7 +31001,7 @@ class QuickBooksLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -29833,16 +31011,16 @@ class QuickBooksLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). - :type endpoint: object + :type endpoint: any :param company_id: The company ID of the QuickBooks company to authorize. - :type company_id: object + :type company_id: any :param consumer_key: The consumer key for OAuth 1.0 authentication. - :type consumer_key: object + :type consumer_key: any :param consumer_secret: The consumer secret for OAuth 1.0 authentication. :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase :param access_token: The access token for OAuth 1.0 authentication. @@ -29851,11 +31029,11 @@ class QuickBooksLinkedService(LinkedService): :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -29883,20 +31061,20 @@ class QuickBooksLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_properties: Optional[object] = None, - endpoint: Optional[object] = None, - company_id: Optional[object] = None, - consumer_key: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_properties: Optional[Any] = None, + endpoint: Optional[Any] = None, + company_id: Optional[Any] = None, + consumer_key: Optional[Any] = None, consumer_secret: Optional["SecretBase"] = None, access_token: Optional["SecretBase"] = None, access_token_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -29919,28 +31097,28 @@ class QuickBooksObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -29965,14 +31143,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -29987,27 +31165,30 @@ class QuickBooksSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -30020,6 +31201,7 @@ class QuickBooksSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -30028,16 +31210,17 @@ class QuickBooksSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'QuickBooksSource' # type: str self.query = query @@ -30047,7 +31230,7 @@ class RecurrenceSchedule(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param minutes: The minutes. :type minutes: list[int] :param hours: The hours. @@ -30072,7 +31255,7 @@ class RecurrenceSchedule(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, minutes: Optional[List[int]] = None, hours: Optional[List[int]] = None, week_days: Optional[List[Union[str, "DaysOfWeek"]]] = None, @@ -30094,7 +31277,7 @@ class RecurrenceScheduleOccurrence(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday". 
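For orientation, a sketch of how RecurrenceSchedule and RecurrenceScheduleOccurrence fit together; the monthly_occurrences field is part of the same model but falls outside the hunks shown here, so treat it as an assumption:

    from azure.mgmt.datafactory.models import (
        RecurrenceSchedule,
        RecurrenceScheduleOccurrence,
    )

    # Fire at 06:30 every Monday, plus on the first Saturday of each month.
    schedule = RecurrenceSchedule(
        minutes=[30],
        hours=[6],
        week_days=["Monday"],
        monthly_occurrences=[RecurrenceScheduleOccurrence(day="Saturday", occurrence=1)],
    )
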
:type day: str or ~azure.mgmt.datafactory.models.DayOfWeek
@@ -30111,7 +31294,7 @@ class RecurrenceScheduleOccurrence(msrest.serialization.Model):
 def __init__(
 self,
 *,
- additional_properties: Optional[Dict[str, object]] = None,
+ additional_properties: Optional[Dict[str, Any]] = None,
 day: Optional[Union[str, "DayOfWeek"]] = None,
 occurrence: Optional[int] = None,
 **kwargs
@@ -30129,15 +31312,15 @@ class RedirectIncompatibleRowSettings(msrest.serialization.Model):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data
 Lake Store linked service used for redirecting incompatible rows. Must be specified if
 redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType
 string).
- :type linked_service_name: object
+ :type linked_service_name: any
 :param path: The path for storing the redirected incompatible row data. Type: string (or
 Expression with resultType string).
- :type path: object
+ :type path: any
 """

 _validation = {
@@ -30153,9 +31336,9 @@ class RedirectIncompatibleRowSettings(msrest.serialization.Model):
 def __init__(
 self,
 *,
- linked_service_name: object,
- additional_properties: Optional[Dict[str, object]] = None,
- path: Optional[object] = None,
+ linked_service_name: Any,
+ additional_properties: Optional[Dict[str, Any]] = None,
+ path: Optional[Any] = None,
 **kwargs
 ):
 super(RedirectIncompatibleRowSettings, self).__init__(**kwargs)
@@ -30175,7 +31358,7 @@ class RedshiftUnloadSettings(msrest.serialization.Model):
 :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store
 the unloaded data from the Amazon Redshift source. The bucket must be in the same region as the
 Amazon Redshift source. Type: string (or Expression with resultType string).
- :type bucket_name: object
+ :type bucket_name: any
 """

 _validation = {
@@ -30192,7 +31375,7 @@ def __init__(
 self,
 *,
 s3_linked_service_name: "LinkedServiceReference",
- bucket_name: object,
+ bucket_name: Any,
 **kwargs
 ):
 super(RedshiftUnloadSettings, self).__init__(**kwargs)
@@ -30207,20 +31390,23 @@ class RelationalSource(CopySource):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Copy source type.Constant filled by server.
 :type type: str
 :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
 integer).
- :type source_retry_count: object
+ :type source_retry_count: any
 :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
 string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
+ :type source_retry_wait: any
 :param max_concurrent_connections: The maximum concurrent connection count for the source data
 store. Type: integer (or Expression with resultType integer).
- :type max_concurrent_connections: object
+ :type max_concurrent_connections: any
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: any
 :param query: Database query. Type: string (or Expression with resultType string).
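A sketch for the RedshiftUnloadSettings model above (linked service and bucket names are hypothetical): the interim S3 store is passed as a LinkedServiceReference, while bucket_name, being "any"-typed, can be a literal or an expression.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        RedshiftUnloadSettings,
    )

    unload = RedshiftUnloadSettings(
        # Hypothetical linked service pointing at the interim S3 account.
        s3_linked_service_name=LinkedServiceReference(reference_name="InterimS3"),
        # Must live in the same region as the Redshift source.
        bucket_name="adf-redshift-unload-staging",
    )
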
- :type query: object + :type query: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -30236,6 +31422,7 @@ class RelationalSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -30243,15 +31430,16 @@ class RelationalSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RelationalSource' # type: str self.query = query self.additional_columns = additional_columns @@ -30264,29 +31452,29 @@ class RelationalTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The relational table name. 
Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -30311,14 +31499,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -30326,6 +31514,43 @@ def __init__( self.table_name = table_name +class RemotePrivateEndpointConnection(msrest.serialization.Model): + """A remote private endpoint connection. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: + :vartype provisioning_state: str + :param private_endpoint: PrivateEndpoint of a remote private endpoint connection. + :type private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper + :param private_link_service_connection_state: The state of a private link connection. + :type private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'private_endpoint': {'key': 'privateEndpoint', 'type': 'ArmIdWrapper'}, + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__( + self, + *, + private_endpoint: Optional["ArmIdWrapper"] = None, + private_link_service_connection_state: Optional["PrivateLinkConnectionState"] = None, + **kwargs + ): + super(RemotePrivateEndpointConnection, self).__init__(**kwargs) + self.provisioning_state = None + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + + class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -30335,7 +31560,7 @@ class RerunTumblingWindowTrigger(Trigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -30344,9 +31569,9 @@ class RerunTumblingWindowTrigger(Trigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param parent_trigger: Required. The parent trigger reference. 
- :type parent_trigger: object + :type parent_trigger: any :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. :type requested_start_time: ~datetime.datetime @@ -30382,13 +31607,13 @@ class RerunTumblingWindowTrigger(Trigger): def __init__( self, *, - parent_trigger: object, + parent_trigger: Any, requested_start_time: datetime.datetime, requested_end_time: datetime.datetime, rerun_concurrency: int, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) @@ -30406,7 +31631,7 @@ class ResponsysLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -30416,30 +31641,30 @@ class ResponsysLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: object + :type endpoint: any :param client_id: Required. The client ID associated with the Responsys application. Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
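A sketch of the RerunTumblingWindowTrigger defined above (trigger name and window are hypothetical). Since parent_trigger is "any"-typed, a TriggerReference-shaped dict is one plausible value, and per the docstring only UTC times are supported:

    import datetime

    from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

    rerun = RerunTumblingWindowTrigger(
        parent_trigger={"referenceName": "DailyWindowTrigger", "type": "TriggerReference"},
        requested_start_time=datetime.datetime(2021, 7, 1, tzinfo=datetime.timezone.utc),
        requested_end_time=datetime.datetime(2021, 7, 8, tzinfo=datetime.timezone.utc),
        rerun_concurrency=2,  # number of rerun windows processed in parallel
    )
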
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -30467,18 +31692,18 @@ class ResponsysLinkedService(LinkedService): def __init__( self, *, - endpoint: object, - client_id: object, - additional_properties: Optional[Dict[str, object]] = None, + endpoint: Any, + client_id: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -30499,28 +31724,28 @@ class ResponsysObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -30545,14 +31770,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -30567,27 +31792,30 @@ class ResponsysSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -30600,6 +31828,7 @@ class ResponsysSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -30608,16 +31837,17 @@ class ResponsysSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ResponsysSource' # type: str self.query = query @@ -30629,41 +31859,41 @@ class RestResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder
 :param relative_url: The relative URL to the resource that the RESTful API provides. Type:
 string (or Expression with resultType string).
- :type relative_url: object
+ :type relative_url: any
 :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
 string (or Expression with resultType string).
- :type request_method: object
+ :type request_method: any
 :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type:
 string (or Expression with resultType string).
- :type request_body: object
+ :type request_body: any
 :param additional_headers: The additional HTTP headers in the request to the RESTful API.
 Type: string (or Expression with resultType string).
- :type additional_headers: object
+ :type additional_headers: any
 :param pagination_rules: The pagination rules to compose next page requests. Type: string (or
 Expression with resultType string).
- :type pagination_rules: object
+ :type pagination_rules: any
 """

 _validation = {
@@ -30692,18 +31922,18 @@ def __init__(
 self,
 *,
 linked_service_name: "LinkedServiceReference",
- additional_properties: Optional[Dict[str, object]] = None,
+ additional_properties: Optional[Dict[str, Any]] = None,
 description: Optional[str] = None,
- structure: Optional[object] = None,
- schema: Optional[object] = None,
+ structure: Optional[Any] = None,
+ schema: Optional[Any] = None,
 parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
- annotations: Optional[List[object]] = None,
+ annotations: Optional[List[Any]] = None,
 folder: Optional["DatasetFolder"] = None,
- relative_url: Optional[object] = None,
- request_method: Optional[object] = None,
- request_body: Optional[object] = None,
- additional_headers: Optional[object] = None,
- pagination_rules: Optional[object] = None,
+ relative_url: Optional[Any] = None,
+ request_method: Optional[Any] = None,
+ request_body: Optional[Any] = None,
+ additional_headers: Optional[Any] = None,
+ pagination_rules: Optional[Any] = None,
 **kwargs
 ):
 super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
@@ -30722,7 +31952,7 @@ class RestServiceLinkedService(LinkedService):

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Type of linked service.Constant filled by server.
 :type type: str
 :param connect_via: The integration runtime reference.
@@ -30732,43 +31962,43 @@ class RestServiceLinkedService(LinkedService):
 :param parameters: Parameters for linked service.
 :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
- :type annotations: list[object]
+ :type annotations: list[any]
 :param url: Required. The base URL of the REST service.
- :type url: object
+ :type url: any
 :param enable_server_certificate_validation: Whether to validate server side SSL certificate
 when connecting to the endpoint. The default value is true. Type: boolean (or Expression with
 resultType boolean).
- :type enable_server_certificate_validation: object
+ :type enable_server_certificate_validation: any
 :param authentication_type: Required. Type of authentication used to connect to the REST
 service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal",
 "ManagedServiceIdentity".
 :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType
 :param user_name: The user name used in Basic authentication type.
- :type user_name: object
+ :type user_name: any
 :param password: The password used in Basic authentication type.
 :type password: ~azure.mgmt.datafactory.models.SecretBase
 :param auth_headers: The additional HTTP headers in the request to RESTful API used for
 authorization. Type: object (or Expression with resultType object).
- :type auth_headers: object
+ :type auth_headers: any
 :param service_principal_id: The application's client ID used in AadServicePrincipal
 authentication type.
- :type service_principal_id: object
+ :type service_principal_id: any
 :param service_principal_key: The application's key used in AadServicePrincipal authentication
 type.
 :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
 :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal
 authentication type under which your application resides.
- :type tenant: object
+ :type tenant: any
 :param azure_cloud_type: Indicates the azure cloud type of the service principal auth. Allowed
 values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data
 factory regions’ cloud type. Type: string (or Expression with resultType string).
- :type azure_cloud_type: object
+ :type azure_cloud_type: any
 :param aad_resource_id: The resource you are requesting authorization to use.
- :type aad_resource_id: object
+ :type aad_resource_id: any
 :param encrypted_credential: The encrypted credential used for authentication. Credentials are
 encrypted using the integration runtime credential manager. Type: string (or Expression with
 resultType string).
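A sketch of an AadServicePrincipal-flavored RestServiceLinkedService (endpoint, tenant, and secret are placeholders; SecureString is the concrete SecretBase type used for inline secrets in these models):

    from azure.mgmt.datafactory.models import RestServiceLinkedService, SecureString

    rest_service = RestServiceLinkedService(
        url="https://example-api.contoso.com",            # placeholder base URL
        authentication_type="AadServicePrincipal",
        service_principal_id="00000000-0000-0000-0000-000000000000",
        service_principal_key=SecureString(value="<client-secret>"),
        tenant="contoso.onmicrosoft.com",
        aad_resource_id="https://management.azure.com/",  # resource being authorized
    )
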
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -30801,23 +32031,23 @@ class RestServiceLinkedService(LinkedService): def __init__( self, *, - url: object, + url: Any, authentication_type: Union[str, "RestServiceAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - enable_server_certificate_validation: Optional[object] = None, - user_name: Optional[object] = None, + annotations: Optional[List[Any]] = None, + enable_server_certificate_validation: Optional[Any] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - auth_headers: Optional[object] = None, - service_principal_id: Optional[object] = None, + auth_headers: Optional[Any] = None, + service_principal_id: Optional[Any] = None, service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - azure_cloud_type: Optional[object] = None, - aad_resource_id: Optional[object] = None, - encrypted_credential: Optional[object] = None, + tenant: Optional[Any] = None, + azure_cloud_type: Optional[Any] = None, + aad_resource_id: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -30843,40 +32073,43 @@ class RestSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). - :type request_method: object + :type request_method: any :param additional_headers: The additional HTTP headers in the request to the RESTful API. 
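For reference, a minimal sketch of constructing the RestServiceLinkedService model retyped above, using AadServicePrincipal authentication. The endpoint, tenant, and secret values are hypothetical placeholders, and SecureString is assumed to be the concrete SecretBase implementation available in this package.

    from azure.mgmt.datafactory.models import (
        RestServiceLinkedService,
        SecureString,
    )

    # Placeholder values throughout; the expression-capable fields accept any
    # JSON-serializable value now that the hints are Any rather than object.
    rest_ls = RestServiceLinkedService(
        url="https://api.example.com",              # required
        authentication_type="AadServicePrincipal",  # enum passed as a plain string
        service_principal_id="00000000-0000-0000-0000-000000000000",
        service_principal_key=SecureString(value="<placeholder-secret>"),
        tenant="contoso.onmicrosoft.com",
        azure_cloud_type="AzurePublic",
        enable_server_certificate_validation=True,
    )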
Type: string (or Expression with resultType string). - :type additional_headers: object + :type additional_headers: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any :param request_interval: The time to await before sending next request, in milliseconds. - :type request_interval: object + :type request_interval: any :param http_compression_type: Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. - :type http_compression_type: object + :type http_compression_type: any """ _validation = { @@ -30891,6 +32124,7 @@ class RestSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, @@ -30901,20 +32135,21 @@ class RestSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - request_method: Optional[object] = None, - additional_headers: Optional[object] = None, - http_request_timeout: Optional[object] = None, - request_interval: Optional[object] = None, - http_compression_type: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + request_method: Optional[Any] = None, + additional_headers: Optional[Any] = None, + http_request_timeout: Optional[Any] = None, + request_interval: Optional[Any] = None, + http_compression_type: Optional[Any] = None, **kwargs ): - super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RestSink' # type: str self.request_method = request_method self.additional_headers = additional_headers @@ -30930,37 +32165,40 @@ class RestSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). - :type request_method: object + :type request_method: any :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). - :type request_body: object + :type request_body: any :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object + :type additional_headers: any :param pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :type pagination_rules: object + :type pagination_rules: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any :param request_interval: The time to await before sending next page request. - :type request_interval: object + :type request_interval: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -30976,6 +32214,7 @@ class RestSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -30988,20 +32227,21 @@ class RestSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - request_method: Optional[object] = None, - request_body: Optional[object] = None, - additional_headers: Optional[object] = None, - pagination_rules: Optional[object] = None, - http_request_timeout: Optional[object] = None, - request_interval: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + request_method: Optional[Any] = None, + request_body: Optional[Any] = None, + additional_headers: Optional[Any] = None, + pagination_rules: Optional[Any] = None, + http_request_timeout: Optional[Any] = None, + request_interval: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RestSource' # type: str self.request_method = request_method self.request_body = request_body @@ -31017,7 +32257,7 @@ class RetryPolicy(msrest.serialization.Model): :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. - :type count: object + :type count: any :param interval_in_seconds: Interval between retries in seconds. Default is 30. :type interval_in_seconds: int """ @@ -31034,7 +32274,7 @@ class RetryPolicy(msrest.serialization.Model): def __init__( self, *, - count: Optional[object] = None, + count: Optional[Any] = None, interval_in_seconds: Optional[int] = None, **kwargs ): @@ -31184,7 +32424,7 @@ class SalesforceLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -31194,26 +32434,26 @@ class SalesforceLinkedService(LinkedService): :param parameters: Parameters for linked service. 
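As a usage sketch for the RestSource and RetryPolicy models above, the following builds a REST copy source with pagination and the newly added disable_metrics_collection flag; the header, pagination rule, and timeout values are hypothetical.

    from azure.mgmt.datafactory.models import RestSource, RetryPolicy

    rest_source = RestSource(
        request_method="GET",                            # default is GET
        additional_headers="Accept: application/json",   # placeholder header string
        pagination_rules={"AbsoluteUrl": "$.nextLink"},  # placeholder rule
        http_request_timeout="00:01:40",                 # TimeSpan pattern
        disable_metrics_collection=False,                # new in this version
    )

    retry = RetryPolicy(count=3, interval_in_seconds=30)

Because the expression-capable parameters are typed Any, either a literal such as "00:01:40" or an ADF expression dict can be supplied for the same field.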
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param environment_url: The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object + :type environment_url: any :param username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :type api_version: object + :type api_version: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -31238,17 +32478,17 @@ class SalesforceLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - environment_url: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + environment_url: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, security_token: Optional["SecretBase"] = None, - api_version: Optional[object] = None, - encrypted_credential: Optional[object] = None, + api_version: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -31268,7 +32508,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -31278,31 +32518,31 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. 
It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param client_id: The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -31328,18 +32568,18 @@ class SalesforceMarketingCloudLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_properties: Optional[object] = None, - client_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_properties: Optional[Any] = None, + client_id: Optional[Any] = None, client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -31360,28 +32600,28 @@ class SalesforceMarketingCloudObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -31406,14 +32646,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -31428,27 +32668,30 @@ class SalesforceMarketingCloudSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. 
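A minimal construction sketch for the SalesforceMarketingCloudObjectDataset defined above; the linked-service and table names are hypothetical, and LinkedServiceReference(reference_name=...) is assumed to be the reference shape used elsewhere in this package.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SalesforceMarketingCloudObjectDataset,
    )

    dataset = SalesforceMarketingCloudObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name="SfmcLinkedService"),
        table_name="Campaign",  # string, or Expression with resultType string
    )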
Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -31461,6 +32704,7 @@ class SalesforceMarketingCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -31469,16 +32713,17 @@ class SalesforceMarketingCloudSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceMarketingCloudSource' # type: str self.query = query @@ -31490,29 +32735,29 @@ class SalesforceObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). - :type object_api_name: object + :type object_api_name: any """ _validation = { @@ -31537,14 +32782,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - object_api_name: Optional[object] = None, + object_api_name: Optional[Any] = None, **kwargs ): super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -31559,7 +32804,7 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -31569,29 +32814,29 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param environment_url: The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object + :type environment_url: any :param username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). - :type api_version: object + :type api_version: any :param extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). 
- :type extended_properties: object + :type extended_properties: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -31617,18 +32862,18 @@ class SalesforceServiceCloudLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - environment_url: Optional[object] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + environment_url: Optional[Any] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, security_token: Optional["SecretBase"] = None, - api_version: Optional[object] = None, - extended_properties: Optional[object] = None, - encrypted_credential: Optional[object] = None, + api_version: Optional[Any] = None, + extended_properties: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -31649,29 +32894,29 @@ class SalesforceServiceCloudObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). 
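For the SalesforceServiceCloudLinkedService above, a minimal sketch with Basic authentication plus a security token; all account values are hypothetical placeholders, and SecureString is again assumed as the SecretBase implementation.

    from azure.mgmt.datafactory.models import (
        SalesforceServiceCloudLinkedService,
        SecureString,
    )

    sfsc_ls = SalesforceServiceCloudLinkedService(
        environment_url="https://test.salesforce.com",  # sandbox endpoint
        username="user@example.com",
        password=SecureString(value="<placeholder>"),
        security_token=SecureString(value="<placeholder>"),  # optional for remote access
        api_version="47.0",
    )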
- :type object_api_name: object + :type object_api_name: any """ _validation = { @@ -31696,14 +32941,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - object_api_name: Optional[object] = None, + object_api_name: Optional[Any] = None, **kwargs ): super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -31718,37 +32963,40 @@ class SalesforceServiceCloudSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :type external_id_field_name: object + :type external_id_field_name: any :param ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. 
If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object + :type ignore_null_values: any """ _validation = { @@ -31763,6 +33011,7 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -31771,18 +33020,19 @@ class SalesforceServiceCloudSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, - external_id_field_name: Optional[object] = None, - ignore_null_values: Optional[object] = None, + external_id_field_name: Optional[Any] = None, + ignore_null_values: Optional[Any] = None, **kwargs ): - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceServiceCloudSink' # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name @@ -31796,20 +33046,23 @@ class SalesforceServiceCloudSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior @@ -31828,6 +33081,7 @@ class SalesforceServiceCloudSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -31836,16 +33090,17 @@ class SalesforceServiceCloudSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceServiceCloudSource' # type: str self.query = query self.read_behavior = read_behavior @@ -31859,37 +33114,40 @@ class SalesforceSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). - :type external_id_field_name: object + :type external_id_field_name: any :param ignore_null_values: The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
- :type ignore_null_values: object + :type ignore_null_values: any """ _validation = { @@ -31904,6 +33162,7 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -31912,18 +33171,19 @@ class SalesforceSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, - external_id_field_name: Optional[object] = None, - ignore_null_values: Optional[object] = None, + external_id_field_name: Optional[Any] = None, + ignore_null_values: Optional[Any] = None, **kwargs ): - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceSink' # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name @@ -31937,26 +33197,29 @@ class SalesforceSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
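The upsert-related parameters of SalesforceSink above can be exercised as in this minimal sketch; the external ID field name is a hypothetical custom field.

    from azure.mgmt.datafactory.models import SalesforceSink

    sink = SalesforceSink(
        write_behavior="Upsert",                  # default is Insert
        external_id_field_name="External_Id__c",  # placeholder external ID field
        ignore_null_values=True,                  # keep destination values on null input
        disable_metrics_collection=False,         # new in this version
    )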
+ :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior @@ -31972,6 +33235,7 @@ class SalesforceSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -31981,17 +33245,18 @@ class SalesforceSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, **kwargs ): - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceSource' # type: str self.query = query self.read_behavior = read_behavior @@ -32004,23 +33269,23 @@ class SapBwCubeDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -32047,12 +33312,12 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -32067,7 +33332,7 @@ class SapBWLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -32077,25 +33342,25 @@ class SapBWLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param system_number: Required. System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: object + :type system_number: any :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param user_name: Username to access the SAP BW server. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to access the SAP BW server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -32123,17 +33388,17 @@ class SapBWLinkedService(LinkedService): def __init__( self, *, - server: object, - system_number: object, - client_id: object, - additional_properties: Optional[Dict[str, object]] = None, + server: Any, + system_number: Any, + client_id: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - user_name: Optional[object] = None, + annotations: Optional[List[Any]] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -32153,26 +33418,29 @@ class SapBwSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: MDX query. Type: string (or Expression with resultType string). 
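A construction sketch for the SapBWLinkedService above, showing its three required connection parameters; the host and credential values are hypothetical, with SecureString assumed for the password.

    from azure.mgmt.datafactory.models import SapBWLinkedService, SecureString

    sap_bw = SapBWLinkedService(
        server="sapbw.example.com",  # required host name
        system_number="00",          # required, usually a two-digit string
        client_id="100",             # required, usually a three-digit string
        user_name="bwuser",
        password=SecureString(value="<placeholder>"),
    )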
- :type query: object + :type query: any """ _validation = { @@ -32185,6 +33453,7 @@ class SapBwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -32193,16 +33462,17 @@ class SapBwSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapBwSource' # type: str self.query = query @@ -32214,7 +33484,7 @@ class SapCloudForCustomerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -32224,20 +33494,20 @@ class SapCloudForCustomerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param username: The username for Basic authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: The password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -32261,15 +33531,15 @@ class SapCloudForCustomerLinkedService(LinkedService): def __init__( self, *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, + url: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -32287,29 +33557,29 @@ class SapCloudForCustomerResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). - :type path: object + :type path: any """ _validation = { @@ -32335,13 +33605,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - path: object, - additional_properties: Optional[Dict[str, object]] = None, + path: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -32357,24 +33627,27 @@ class SapCloudForCustomerSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
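For the dataset just shown, a sketch pairing SapCloudForCustomerResourceDataset with a linked service reference; the reference name and entity path are hypothetical, and the LinkedServiceReference constructor shape is an assumption inferred from its docstring mentions:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapCloudForCustomerResourceDataset,
)

# Hypothetical names; path is the dataset's required OData entity.
c4c_dataset = SapCloudForCustomerResourceDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="SapC4CLinkedService",
    ),
    path="LeadCollection",
)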
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". :type write_behavior: str or @@ -32383,7 +33656,7 @@ class SapCloudForCustomerSink(CopySink): to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
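A sketch of the sink described above, choosing update-style writes and stretching the HTTP timeout past its 00:05:00 default; all values are illustrative:

from azure.mgmt.datafactory.models import SapCloudForCustomerSink

# write_behavior accepts "Insert" (the default) or "Update"; timeouts follow
# the TimeSpan pattern quoted in the docstring.
c4c_sink = SapCloudForCustomerSink(
    write_behavior="Update",
    http_request_timeout="00:10:00",
    disable_metrics_collection=False,
)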
- :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -32398,6 +33671,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -32405,17 +33679,18 @@ class SapCloudForCustomerSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, write_behavior: Optional[Union[str, "SapCloudForCustomerSinkWriteBehavior"]] = None, - http_request_timeout: Optional[object] = None, + http_request_timeout: Optional[Any] = None, **kwargs ): - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = write_behavior self.http_request_timeout = http_request_timeout @@ -32428,32 +33703,35 @@ class SapCloudForCustomerSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: object + :type query: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -32466,6 +33744,7 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -32475,17 +33754,18 @@ class SapCloudForCustomerSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, - http_request_timeout: Optional[object] = None, + query: Optional[Any] = None, + http_request_timeout: Optional[Any] = None, **kwargs ): - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapCloudForCustomerSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -32498,7 +33778,7 @@ class SapEccLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. 
:type type: str :param connect_via: The integration runtime reference. @@ -32508,7 +33788,7 @@ class SapEccLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param url: Required. The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). @@ -32546,11 +33826,11 @@ def __init__( self, *, url: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, username: Optional[str] = None, password: Optional["SecretBase"] = None, encrypted_credential: Optional[str] = None, @@ -32571,29 +33851,29 @@ class SapEccResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). - :type path: object + :type path: any """ _validation = { @@ -32619,13 +33899,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - path: object, - additional_properties: Optional[Dict[str, object]] = None, + path: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -32641,32 +33921,35 @@ class SapEccSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
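Worth flagging while reviewing: SapEccLinkedService keeps plain str hints for url, username and encrypted_credential, so only the shared base-class fields move to Any here. A minimal sketch with a placeholder endpoint:

from azure.mgmt.datafactory.models import SapEccLinkedService

# url stays a plain string parameter, unlike the Any-typed urls elsewhere.
ecc_linked_service = SapEccLinkedService(
    url="https://hostname:port/sap/opu/odata/sap/servicename/",
    username="ecc_reader",
)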
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: object + :type query: any :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
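A one-line sketch of the source above, reusing the "$top=1" example from its own docstring:

from azure.mgmt.datafactory.models import SapEccSource

# Fetch a single entity as a smoke test; both values are illustrative.
ecc_source = SapEccSource(query="$top=1", http_request_timeout="00:05:00")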
- :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -32679,6 +33962,7 @@ class SapEccSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -32688,17 +33972,18 @@ class SapEccSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, - http_request_timeout: Optional[object] = None, + query: Optional[Any] = None, + http_request_timeout: Optional[Any] = None, **kwargs ): - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapEccSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -32711,7 +33996,7 @@ class SapHanaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -32721,25 +34006,25 @@ class SapHanaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param server: Host name of the SAP HANA server. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param authentication_type: The authentication type to be used to connect to the SAP HANA server. Possible values include: "Basic", "Windows". 
:type authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType :param user_name: Username to access the SAP HANA server. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to access the SAP HANA server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -32764,17 +34049,17 @@ class SapHanaLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - server: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, + server: Optional[Any] = None, authentication_type: Optional[Union[str, "SapHanaAuthenticationType"]] = None, - user_name: Optional[object] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -32792,7 +34077,7 @@ class SapHanaPartitionSettings(msrest.serialization.Model): :param partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any """ _attribute_map = { @@ -32802,7 +34087,7 @@ class SapHanaPartitionSettings(msrest.serialization.Model): def __init__( self, *, - partition_column_name: Optional[object] = None, + partition_column_name: Optional[Any] = None, **kwargs ): super(SapHanaPartitionSettings, self).__init__(**kwargs) @@ -32816,32 +34101,35 @@ class SapHanaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
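A sketch of a dynamic-range parallel read with the SAP HANA source and partition settings defined below; the SQL text and partition column are placeholders:

from azure.mgmt.datafactory.models import SapHanaPartitionSettings, SapHanaSource

# partition_option values come from the docstring: "None",
# "PhysicalPartitionsOfTable" or "SapHanaDynamicRange".
hana_source = SapHanaSource(
    query="SELECT * FROM MYSCHEMA.MYTABLE",
    partition_option="SapHanaDynamicRange",
    partition_settings=SapHanaPartitionSettings(partition_column_name="ID"),
)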
+ :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression with resultType integer). - :type packet_size: object + :type packet_size: any :param partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for SAP HANA source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.SapHanaPartitionSettings @@ -32857,6 +34145,7 @@ class SapHanaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -32868,19 +34157,20 @@ class SapHanaSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, - packet_size: Optional[object] = None, - partition_option: Optional[object] = None, + query: Optional[Any] = None, + packet_size: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["SapHanaPartitionSettings"] = None, **kwargs ): - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapHanaSource' # type: str self.query = query self.packet_size = packet_size @@ -32895,31 +34185,31 @@ class 
SapHanaTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of SAP HANA. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -32945,15 +34235,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + schema_type_properties_schema: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -32969,7 +34259,7 @@ class SapOpenHubLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -32979,43 +34269,43 @@ class SapOpenHubLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[object] + :type annotations: list[any] :param server: Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param system_number: System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: object + :type system_number: any :param client_id: Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). - :type language: object + :type language: any :param system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :type system_id: object + :type system_id: any :param user_name: Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to access the SAP BW server where the open hub destination is located. :type password: ~azure.mgmt.datafactory.models.SecretBase :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :type message_server: object + :type message_server: any :param message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :type message_server_service: object + :type message_server_service: any :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :type logon_group: object + :type logon_group: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
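A sketch of the message-server logon path this linked service supports, again assuming the package's SecureString secret type; every value is a placeholder:

from azure.mgmt.datafactory.models import SapOpenHubLinkedService, SecureString

# Message server plus logon group instead of a direct application server.
open_hub_linked_service = SapOpenHubLinkedService(
    message_server="sapmsg.contoso.example",
    message_server_service="3600",
    logon_group="PUBLIC",
    client_id="100",
    user_name="copy_user",
    password=SecureString(value="<secret>"),
)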
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -33045,22 +34335,22 @@ class SapOpenHubLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - server: Optional[object] = None, - system_number: Optional[object] = None, - client_id: Optional[object] = None, - language: Optional[object] = None, - system_id: Optional[object] = None, - user_name: Optional[object] = None, + annotations: Optional[List[Any]] = None, + server: Optional[Any] = None, + system_number: Optional[Any] = None, + client_id: Optional[Any] = None, + language: Optional[Any] = None, + system_id: Optional[Any] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - message_server: Optional[object] = None, - message_server_service: Optional[object] = None, - logon_group: Optional[object] = None, - encrypted_credential: Optional[object] = None, + message_server: Optional[Any] = None, + message_server_service: Optional[Any] = None, + logon_group: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -33085,38 +34375,41 @@ class SapOpenHubSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: object + :type exclude_last_request: any :param base_request_id: The ID of request for delta loading. 
Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :type base_request_id: object + :type base_request_id: any :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object + :type custom_rfc_read_table_function_module: any :param sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :type sap_data_column_delimiter: object + :type sap_data_column_delimiter: any """ _validation = { @@ -33129,6 +34422,7 @@ class SapOpenHubSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, @@ -33140,19 +34434,20 @@ class SapOpenHubSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - exclude_last_request: Optional[object] = None, - base_request_id: Optional[object] = None, - custom_rfc_read_table_function_module: Optional[object] = None, - sap_data_column_delimiter: Optional[object] = None, + exclude_last_request: Optional[Any] = None, + base_request_id: Optional[Any] = None, + custom_rfc_read_table_function_module: Optional[Any] = None, + sap_data_column_delimiter: Optional[Any] = None, **kwargs ): - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id @@ -33167,36 +34462,36 @@ class SapOpenHubTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
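A sketch of an incremental open hub read using the two delta-loading knobs just listed; the watermark value is hypothetical:

from azure.mgmt.datafactory.models import SapOpenHubSource

# Skip the still-open last request and only read requests newer than a saved
# watermark (base_request_id), per the docstring defaults.
open_hub_source = SapOpenHubSource(
    exclude_last_request=True,
    base_request_id=42,
)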
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param open_hub_destination_name: Required. The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). - :type open_hub_destination_name: object + :type open_hub_destination_name: any :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: object + :type exclude_last_request: any :param base_request_id: The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). - :type base_request_id: object + :type base_request_id: any """ _validation = { @@ -33224,16 +34519,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - open_hub_destination_name: object, - additional_properties: Optional[Dict[str, object]] = None, + open_hub_destination_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - exclude_last_request: Optional[object] = None, - base_request_id: Optional[object] = None, + exclude_last_request: Optional[Any] = None, + base_request_id: Optional[Any] = None, **kwargs ): super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -33250,7 +34545,7 @@ class SapTableLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. 
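And the matching dataset sketch, with a hypothetical destination name and the same assumed LinkedServiceReference shape as above:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapOpenHubTableDataset,
)

# open_hub_destination_name is required and must name a Database Table
# destination, per the docstring.
open_hub_dataset = SapOpenHubTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="SapOpenHubLS",
    ),
    open_hub_destination_name="ZOHD_SALES",
)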
:type type: str :param connect_via: The integration runtime reference. @@ -33260,57 +34555,57 @@ class SapTableLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param system_number: System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: object + :type system_number: any :param client_id: Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: object + :type client_id: any :param language: Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). - :type language: object + :type language: any :param system_id: SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). - :type system_id: object + :type system_id: any :param user_name: Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to access the SAP server where the table is located. :type password: ~azure.mgmt.datafactory.models.SecretBase :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with resultType string). - :type message_server: object + :type message_server: any :param message_server_service: The service name or port number of the Message Server. Type: string (or Expression with resultType string). - :type message_server_service: object + :type message_server_service: any :param snc_mode: SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :type snc_mode: object + :type snc_mode: any :param snc_my_name: Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_my_name: object + :type snc_my_name: any :param snc_partner_name: Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_partner_name: object + :type snc_partner_name: any :param snc_library_path: External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). - :type snc_library_path: object + :type snc_library_path: any :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). - :type snc_qop: object + :type snc_qop: any :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with resultType string). - :type logon_group: object + :type logon_group: any :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -33345,27 +34640,27 @@ class SapTableLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - server: Optional[object] = None, - system_number: Optional[object] = None, - client_id: Optional[object] = None, - language: Optional[object] = None, - system_id: Optional[object] = None, - user_name: Optional[object] = None, + annotations: Optional[List[Any]] = None, + server: Optional[Any] = None, + system_number: Optional[Any] = None, + client_id: Optional[Any] = None, + language: Optional[Any] = None, + system_id: Optional[Any] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - message_server: Optional[object] = None, - message_server_service: Optional[object] = None, - snc_mode: Optional[object] = None, - snc_my_name: Optional[object] = None, - snc_partner_name: Optional[object] = None, - snc_library_path: Optional[object] = None, - snc_qop: Optional[object] = None, - logon_group: Optional[object] = None, - encrypted_credential: Optional[object] = None, + message_server: Optional[Any] = None, + message_server_service: Optional[Any] = None, + snc_mode: Optional[Any] = None, + snc_my_name: Optional[Any] = None, + snc_partner_name: Optional[Any] = None, + snc_library_path: Optional[Any] = None, + snc_qop: Optional[Any] = None, + logon_group: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -33393,18 +34688,18 @@ class SapTablePartitionSettings(msrest.serialization.Model): :param partition_column_name: The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_lower_bound: object + :type partition_lower_bound: any :param max_partitions_number: The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). 
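A sketch of the range-partition bounds described above; the column and bound values are placeholders and, as the docstring notes, travel as strings:

from azure.mgmt.datafactory.models import SapTablePartitionSettings

sap_table_partitions = SapTablePartitionSettings(
    partition_column_name="BUDAT",
    partition_lower_bound="20200101",
    partition_upper_bound="20211231",
    max_partitions_number=4,
)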
- :type max_partitions_number: object + :type max_partitions_number: any """ _attribute_map = { @@ -33417,10 +34712,10 @@ class SapTablePartitionSettings(msrest.serialization.Model): def __init__( self, *, - partition_column_name: Optional[object] = None, - partition_upper_bound: Optional[object] = None, - partition_lower_bound: Optional[object] = None, - max_partitions_number: Optional[object] = None, + partition_column_name: Optional[Any] = None, + partition_upper_bound: Optional[Any] = None, + partition_lower_bound: Optional[Any] = None, + max_partitions_number: Optional[Any] = None, **kwargs ): super(SapTablePartitionSettings, self).__init__(**kwargs) @@ -33437,29 +34732,29 @@ class SapTableResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: Required. The name of the SAP Table. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -33485,13 +34780,13 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - table_name: object, - additional_properties: Optional[Dict[str, object]] = None, + table_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, **kwargs ): @@ -33507,50 +34802,53 @@ class SapTableSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). - :type row_count: object + :type row_count: any :param row_skips: The number of rows that will be skipped. Type: integer (or Expression with resultType integer). - :type row_skips: object + :type row_skips: any :param rfc_table_fields: The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). - :type rfc_table_fields: object + :type rfc_table_fields: any :param rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). - :type rfc_table_options: object + :type rfc_table_options: any :param batch_size: Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). - :type batch_size: object + :type batch_size: any :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object + :type custom_rfc_read_table_function_module: any :param sap_data_column_delimiter: The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). - :type sap_data_column_delimiter: object + :type sap_data_column_delimiter: any :param partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for SAP table source partitioning. 
:type partition_settings: ~azure.mgmt.datafactory.models.SapTablePartitionSettings @@ -33566,6 +34864,7 @@ class SapTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, @@ -33582,24 +34881,25 @@ class SapTableSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - row_count: Optional[object] = None, - row_skips: Optional[object] = None, - rfc_table_fields: Optional[object] = None, - rfc_table_options: Optional[object] = None, - batch_size: Optional[object] = None, - custom_rfc_read_table_function_module: Optional[object] = None, - sap_data_column_delimiter: Optional[object] = None, - partition_option: Optional[object] = None, + row_count: Optional[Any] = None, + row_skips: Optional[Any] = None, + rfc_table_fields: Optional[Any] = None, + rfc_table_options: Optional[Any] = None, + batch_size: Optional[Any] = None, + custom_rfc_read_table_function_module: Optional[Any] = None, + sap_data_column_delimiter: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["SapTablePartitionSettings"] = None, **kwargs ): - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapTableSource' # type: str self.row_count = row_count self.row_skips = row_skips @@ -33621,7 +34921,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -33630,7 +34930,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
:vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] :param recurrence: Required. Recurrence schedule configuration. @@ -33657,9 +34957,9 @@ def __init__( self, *, recurrence: "ScheduleTriggerRecurrence", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): @@ -33673,7 +34973,7 @@ class ScheduleTriggerRecurrence(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", "Day", "Week", "Month", "Year". :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency @@ -33702,7 +35002,7 @@ class ScheduleTriggerRecurrence(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, frequency: Optional[Union[str, "RecurrenceFrequency"]] = None, interval: Optional[int] = None, start_time: Optional[datetime.datetime] = None, @@ -33730,9 +35030,8 @@ class ScriptAction(msrest.serialization.Model): :type name: str :param uri: Required. The URI for the script action. :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~azure.mgmt.datafactory.models.HdiNodeTypes + :param roles: Required. The node types on which the script action should be executed. + :type roles: str :param parameters: The parameters for the script action. :type parameters: str """ @@ -33755,7 +35054,7 @@ def __init__( *, name: str, uri: str, - roles: Union[str, "HdiNodeTypes"], + roles: str, parameters: Optional[str] = None, **kwargs ): @@ -33845,7 +35144,7 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of integration runtime.Constant filled by server. Possible values include: "Managed", "SelfHosted". :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType @@ -33869,7 +35168,7 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, linked_info: Optional["LinkedIntegrationRuntimeType"] = None, **kwargs @@ -33886,7 +35185,7 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :ivar node_name: Name of the integration runtime node.
 :vartype node_name: str
 :ivar machine_name: Machine name of the integration runtime node.
@@ -33978,7 +35277,7 @@ class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model):
 def __init__(
 self,
 *,
- additional_properties: Optional[Dict[str, object]] = None,
+ additional_properties: Optional[Dict[str, Any]] = None,
 **kwargs
 ):
 super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs)
@@ -34012,7 +35311,7 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Type of integration runtime.Constant filled by server. Possible values
 include: "Managed", "SelfHosted".
 :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType
@@ -34110,7 +35409,7 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
 def __init__(
 self,
 *,
- additional_properties: Optional[Dict[str, object]] = None,
+ additional_properties: Optional[Dict[str, Any]] = None,
 nodes: Optional[List["SelfHostedIntegrationRuntimeNode"]] = None,
 links: Optional[List["LinkedIntegrationRuntime"]] = None,
 **kwargs
@@ -34142,7 +35441,7 @@ class ServiceNowLinkedService(LinkedService):
 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
- :type additional_properties: dict[str, object]
+ :type additional_properties: dict[str, any]
 :param type: Required. Type of linked service.Constant filled by server.
 :type type: str
 :param connect_via: The integration runtime reference.
@@ -34152,37 +35451,37 @@ class ServiceNowLinkedService(LinkedService):
 :param parameters: Parameters for linked service.
 :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
- :type annotations: list[object]
+ :type annotations: list[any]
 :param endpoint: Required. The endpoint of the ServiceNow server. (i.e.
 :code:`<instance>`.service-now.com).
- :type endpoint: object
+ :type endpoint: any
 :param authentication_type: Required. The authentication type to use. Possible values include:
 "Basic", "OAuth2".
 :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType
 :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2
 authentication.
- :type username: object
+ :type username: any
 :param password: The password corresponding to the user name for Basic and OAuth2
 authentication.
 :type password: ~azure.mgmt.datafactory.models.SecretBase
 :param client_id: The client id for OAuth2 authentication.
- :type client_id: object
+ :type client_id: any
 :param client_secret: The client secret for OAuth2 authentication.
 :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
 :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted
 using HTTPS. The default value is true.
- :type use_encrypted_endpoints: object
+ :type use_encrypted_endpoints: any
 :param use_host_verification: Specifies whether to require the host name in the server's
 certificate to match the host name of the server when connecting over SSL. The default value
 is true.
- :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -34213,21 +35512,21 @@ class ServiceNowLinkedService(LinkedService): def __init__( self, *, - endpoint: object, + endpoint: Any, authentication_type: Union[str, "ServiceNowAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - username: Optional[object] = None, + annotations: Optional[List[Any]] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - client_id: Optional[object] = None, + client_id: Optional[Any] = None, client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -34251,28 +35550,28 @@ class ServiceNowObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -34297,14 +35596,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -34319,27 +35618,30 @@ class ServiceNowSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -34352,6 +35654,7 @@ class ServiceNowSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -34360,28 +35663,29 @@ class ServiceNowSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ServiceNowSource' # type: str self.query = query -class SetVariableActivity(Activity): +class SetVariableActivity(ControlActivity): """Set value for a Variable. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -34395,7 +35699,7 @@ class SetVariableActivity(Activity): :param variable_name: Name of the variable whose value needs to be set. :type variable_name: str :param value: Value to be set. Could be a static value or Expression. 
- :type value: object + :type value: any """ _validation = { @@ -34418,12 +35722,12 @@ def __init__( self, *, name: str, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, variable_name: Optional[str] = None, - value: Optional[object] = None, + value: Optional[Any] = None, **kwargs ): super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) @@ -34439,15 +35743,15 @@ class SftpLocation(DatasetLocation): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type folder_path: object + :type folder_path: any :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type file_name: object + :type file_name: any """ _validation = { @@ -34464,9 +35768,9 @@ class SftpLocation(DatasetLocation): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + folder_path: Optional[Any] = None, + file_name: Optional[Any] = None, **kwargs ): super(SftpLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) @@ -34480,39 +35784,42 @@ class SftpReadSettings(StoreReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - :type recursive: object + :type recursive: any :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with resultType string). - :type wildcard_folder_path: object + :type wildcard_folder_path: any :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). - :type wildcard_file_name: object + :type wildcard_file_name: any :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
- :type partition_root_path: object + :type partition_root_path: any :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). - :type file_list_path: object + :type file_list_path: any :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). - :type delete_files_after_completion: object + :type delete_files_after_completion: any :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object + :type modified_datetime_start: any :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type modified_datetime_end: object + :type modified_datetime_end: any """ _validation = { @@ -34523,6 +35830,7 @@ class SftpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -34537,20 +35845,21 @@ class SftpReadSettings(StoreReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + recursive: Optional[Any] = None, + wildcard_folder_path: Optional[Any] = None, + wildcard_file_name: Optional[Any] = None, enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[object] = None, - file_list_path: Optional[object] = None, - delete_files_after_completion: Optional[object] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, + partition_root_path: Optional[Any] = None, + file_list_path: Optional[Any] = None, + delete_files_after_completion: Optional[Any] = None, + modified_datetime_start: Optional[Any] = None, + modified_datetime_end: Optional[Any] = None, **kwargs ): - super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SftpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -34570,7 +35879,7 @@ class SftpServerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. 
:type type: str :param connect_via: The integration runtime reference. @@ -34580,30 +35889,30 @@ class SftpServerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The SFTP server host name. Type: string (or Expression with resultType string). - :type host: object + :type host: any :param port: The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object + :type port: any :param authentication_type: The authentication type to be used to connect to the FTP server. Possible values include: "Basic", "SshPublicKey", "MultiFactor". :type authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType :param user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: Password to logon the SFTP server for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). - :type private_key_path: object + :type private_key_path: any :param private_key_content: Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. @@ -34613,11 +35922,11 @@ class SftpServerLinkedService(LinkedService): :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). - :type skip_host_key_validation: object + :type skip_host_key_validation: any :param host_key_fingerprint: The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). 
- :type host_key_fingerprint: object + :type host_key_fingerprint: any """ _validation = { @@ -34648,22 +35957,22 @@ class SftpServerLinkedService(LinkedService): def __init__( self, *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, + host: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, + annotations: Optional[List[Any]] = None, + port: Optional[Any] = None, authentication_type: Optional[Union[str, "SftpAuthenticationType"]] = None, - user_name: Optional[object] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, - private_key_path: Optional[object] = None, + encrypted_credential: Optional[Any] = None, + private_key_path: Optional[Any] = None, private_key_content: Optional["SecretBase"] = None, pass_phrase: Optional["SecretBase"] = None, - skip_host_key_validation: Optional[object] = None, - host_key_fingerprint: Optional[object] = None, + skip_host_key_validation: Optional[Any] = None, + host_key_fingerprint: Optional[Any] = None, **kwargs ): super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -34688,21 +35997,24 @@ class SftpWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The write setting type.Constant filled by server. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object + :type copy_behavior: any :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). - :type operation_timeout: object + :type operation_timeout: any :param use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). 
- :type use_temp_file_rename: object + :type use_temp_file_rename: any """ _validation = { @@ -34713,6 +36025,7 @@ class SftpWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, @@ -34721,14 +36034,15 @@ class SftpWriteSettings(StoreWriteSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - copy_behavior: Optional[object] = None, - operation_timeout: Optional[object] = None, - use_temp_file_rename: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + copy_behavior: Optional[Any] = None, + operation_timeout: Optional[Any] = None, + use_temp_file_rename: Optional[Any] = None, **kwargs ): - super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'SftpWriteSettings' # type: str self.operation_timeout = operation_timeout self.use_temp_file_rename = use_temp_file_rename @@ -34741,7 +36055,7 @@ class SharePointOnlineListLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -34751,26 +36065,26 @@ class SharePointOnlineListLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param site_url: Required. The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). - :type site_url: object + :type site_url: any :param tenant_id: Required. The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview page. Type: string (or Expression with resultType string). - :type tenant_id: object + :type tenant_id: any :param service_principal_id: Required. The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). - :type service_principal_id: object + :type service_principal_id: any :param service_principal_key: Required. The client secret of your application registered in Azure Active Directory. 
Type: string (or Expression with resultType string). :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -34798,16 +36112,16 @@ class SharePointOnlineListLinkedService(LinkedService): def __init__( self, *, - site_url: object, - tenant_id: object, - service_principal_id: object, + site_url: Any, + tenant_id: Any, + service_principal_id: Any, service_principal_key: "SecretBase", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - encrypted_credential: Optional[object] = None, + annotations: Optional[List[Any]] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SharePointOnlineListLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -34826,29 +36140,29 @@ class SharePointOnlineListResourceDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param list_name: The name of the SharePoint Online list. Type: string (or Expression with resultType string). 
- :type list_name: object + :type list_name: any """ _validation = { @@ -34873,14 +36187,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - list_name: Optional[object] = None, + list_name: Optional[Any] = None, **kwargs ): super(SharePointOnlineListResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -34895,25 +36209,28 @@ class SharePointOnlineListSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: object + :type query: any :param http_request_timeout: The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object + :type http_request_timeout: any """ _validation = { @@ -34926,6 +36243,7 @@ class SharePointOnlineListSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -34933,15 +36251,16 @@ class SharePointOnlineListSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, - http_request_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, + http_request_timeout: Optional[Any] = None, **kwargs ): - super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SharePointOnlineListSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -34954,7 +36273,7 @@ class ShopifyLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -34964,26 +36283,26 @@ class ShopifyLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). - :type host: object + :type host: any :param access_token: The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
- :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -35009,17 +36328,17 @@ class ShopifyLinkedService(LinkedService): def __init__( self, *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, + host: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, access_token: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -35039,28 +36358,28 @@ class ShopifyObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
- :type table_name: object + :type table_name: any """ _validation = { @@ -35085,14 +36404,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -35107,27 +36426,30 @@ class ShopifySource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -35140,6 +36462,7 @@ class ShopifySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -35148,16 +36471,17 @@ class ShopifySource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ShopifySource' # type: str self.query = query @@ -35167,10 +36491,10 @@ class SkipErrorFile(msrest.serialization.Model): :param file_missing: Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). - :type file_missing: object + :type file_missing: any :param data_inconsistency: Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). - :type data_inconsistency: object + :type data_inconsistency: any """ _attribute_map = { @@ -35181,8 +36505,8 @@ class SkipErrorFile(msrest.serialization.Model): def __init__( self, *, - file_missing: Optional[object] = None, - data_inconsistency: Optional[object] = None, + file_missing: Optional[Any] = None, + data_inconsistency: Optional[Any] = None, **kwargs ): super(SkipErrorFile, self).__init__(**kwargs) @@ -35197,32 +36521,32 @@ class SnowflakeDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param schema_type_properties_schema: The schema name of the Snowflake database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the Snowflake database. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -35248,15 +36572,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + schema_type_properties_schema: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(SnowflakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -35272,19 +36596,19 @@ class SnowflakeExportCopyCommand(ExportSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The export setting type.Constant filled by server. :type type: str :param additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :type additional_copy_options: dict[str, object] + :type additional_copy_options: dict[str, any] :param additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. 
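# Illustrative sketch (not from this patch): a SnowflakeDataset pointing at a
# schema/table pair. The linked-service reference name is a placeholder, and
# LinkedServiceReference comes from elsewhere in these models.
from azure.mgmt.datafactory.models import LinkedServiceReference, SnowflakeDataset

ds = SnowflakeDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="SnowflakeLS"  # placeholder name
    ),
    schema_type_properties_schema="PUBLIC",
    table="ORDERS",
)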
- :type additional_format_options: dict[str, object] + :type additional_format_options: dict[str, any] """ _validation = { @@ -35301,9 +36625,9 @@ class SnowflakeExportCopyCommand(ExportSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - additional_copy_options: Optional[Dict[str, object]] = None, - additional_format_options: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, + additional_copy_options: Optional[Dict[str, Any]] = None, + additional_format_options: Optional[Dict[str, Any]] = None, **kwargs ): super(SnowflakeExportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) @@ -35319,19 +36643,19 @@ class SnowflakeImportCopyCommand(ImportSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The import setting type.Constant filled by server. :type type: str :param additional_copy_options: Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }. - :type additional_copy_options: dict[str, object] + :type additional_copy_options: dict[str, any] :param additional_format_options: Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. - :type additional_format_options: dict[str, object] + :type additional_format_options: dict[str, any] """ _validation = { @@ -35348,9 +36672,9 @@ class SnowflakeImportCopyCommand(ImportSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - additional_copy_options: Optional[Dict[str, object]] = None, - additional_format_options: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, + additional_copy_options: Optional[Dict[str, Any]] = None, + additional_format_options: Optional[Dict[str, Any]] = None, **kwargs ): super(SnowflakeImportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) @@ -35366,7 +36690,7 @@ class SnowflakeLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -35376,16 +36700,16 @@ class SnowflakeLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string of snowflake. Type: string, SecureString. - :type connection_string: object + :type connection_string: any :param password: The Azure key vault secret reference of password in connection string. 
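# Illustrative sketch (not from this patch) of the Snowflake COPY-command
# pass-through options documented above; keys and values mirror the docstring
# examples and are forwarded to Snowflake verbatim, so values must be strings.
from azure.mgmt.datafactory.models import (
    SnowflakeExportCopyCommand,
    SnowflakeImportCopyCommand,
)

export_settings = SnowflakeExportCopyCommand(
    additional_copy_options={"DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'"},
    additional_format_options={"OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'"},
)
import_settings = SnowflakeImportCopyCommand(
    additional_format_options={"FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'"},
)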
:type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -35408,14 +36732,14 @@ class SnowflakeLinkedService(LinkedService): def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SnowflakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -35432,27 +36756,30 @@ class SnowflakeSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param import_settings: Snowflake import settings. 
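# Illustrative sketch (not from this patch): a SnowflakeLinkedService whose
# password is resolved from Key Vault. The connection string and reference
# names are placeholders, and AzureKeyVaultSecretReference (store + secret_name)
# is assumed from elsewhere in these models.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    SnowflakeLinkedService,
)

ls = SnowflakeLinkedService(
    connection_string="jdbc:snowflake://myaccount.snowflakecomputing.com/?db=MYDB",  # placeholder
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(type="LinkedServiceReference", reference_name="AkvLS"),
        secret_name="snowflake-password",  # placeholder secret name
    ),
)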
:type import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand """ @@ -35469,6 +36796,7 @@ class SnowflakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, } @@ -35476,17 +36804,18 @@ class SnowflakeSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, import_settings: Optional["SnowflakeImportCopyCommand"] = None, **kwargs ): - super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSink' # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -35499,20 +36828,23 @@ class SnowflakeSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query: Snowflake Sql query. Type: string (or Expression with resultType string). 
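# Illustrative sketch (not from this patch): a SnowflakeSink with a pre-copy
# script, import settings, and the disable_metrics_collection flag this change
# threads through every sink. The SQL is a placeholder.
from azure.mgmt.datafactory.models import SnowflakeImportCopyCommand, SnowflakeSink

sink = SnowflakeSink(
    pre_copy_script="TRUNCATE TABLE MYDB.PUBLIC.ORDERS",  # placeholder SQL
    import_settings=SnowflakeImportCopyCommand(additional_format_options={"FORCE": "TRUE"}),
    disable_metrics_collection=False,
)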
- :type query: object + :type query: any :param export_settings: Snowflake export settings. :type export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand """ @@ -35527,6 +36859,7 @@ class SnowflakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } @@ -35534,15 +36867,16 @@ class SnowflakeSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query: Optional[Any] = None, export_settings: Optional["SnowflakeExportCopyCommand"] = None, **kwargs ): - super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSource' # type: str self.query = query self.export_settings = export_settings @@ -35555,7 +36889,7 @@ class SparkLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -35565,12 +36899,12 @@ class SparkLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param host: Required. IP address or host name of the Spark server. - :type host: object + :type host: any :param port: Required. The TCP port that the Spark server uses to listen for client connections. - :type port: object + :type port: any :param server_type: The type of Spark server. Possible values include: "SharkServer", "SharkServer2", "SparkThriftServer". :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType @@ -35583,32 +36917,32 @@ class SparkLinkedService(LinkedService): "WindowsAzureHDInsightService". :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType :param username: The user name that you use to access Spark Server. 
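# Illustrative sketch (not from this patch): a SnowflakeSource paired with
# export settings. The query is a placeholder and could equally be an ADF
# Expression object, which is why these members are typed "any".
from azure.mgmt.datafactory.models import SnowflakeExportCopyCommand, SnowflakeSource

src = SnowflakeSource(
    query="SELECT * FROM MYDB.PUBLIC.ORDERS",  # placeholder query
    export_settings=SnowflakeExportCopyCommand(
        additional_copy_options={"DATE_FORMAT": "MM/DD/YYYY"},
    ),
)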
- :type username: object + :type username: any :param password: The password corresponding to the user name that you provided in the Username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Spark server. - :type http_path: object + :type http_path: any :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. - :type enable_ssl: object + :type enable_ssl: any :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object + :type trusted_cert_path: any :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object + :type use_system_trust_store: any :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object + :type allow_host_name_cn_mismatch: any :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object + :type allow_self_signed_server_cert: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -35644,25 +36978,25 @@ class SparkLinkedService(LinkedService): def __init__( self, *, - host: object, - port: object, + host: Any, + port: Any, authentication_type: Union[str, "SparkAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, server_type: Optional[Union[str, "SparkServerType"]] = None, thrift_transport_protocol: Optional[Union[str, "SparkThriftTransportProtocol"]] = None, - username: Optional[object] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - http_path: Optional[object] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + http_path: Optional[Any] = None, + enable_ssl: Optional[Any] = None, + trusted_cert_path: Optional[Any] = None, + use_system_trust_store: Optional[Any] = None, + allow_host_name_cn_mismatch: Optional[Any] = None, + allow_self_signed_server_cert: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, 
**kwargs) @@ -35690,34 +37024,34 @@ class SparkObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Spark. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -35744,16 +37078,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -35770,27 +37104,30 @@ class SparkSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). 
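# Illustrative sketch (not from this patch): the three required
# SparkLinkedService fields plus an SSL toggle. The host and port values are
# placeholders; host/port are "any", so Expression objects are also accepted.
from azure.mgmt.datafactory.models import SparkLinkedService

spark_ls = SparkLinkedService(
    host="spark.contoso.com",  # placeholder host
    port=443,
    authentication_type="WindowsAzureHDInsightService",
    enable_ssl=True,
)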
- :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -35803,6 +37140,7 @@ class SparkSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -35811,20 +37149,63 @@ class SparkSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SparkSource' # type: str self.query = query +class SqlAlwaysEncryptedProperties(msrest.serialization.Model): + """Sql always encrypted properties. + + All required parameters must be populated in order to send to Azure. 
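# Illustrative sketch (not from this patch): a SparkSource whose query is an
# ADF Expression object rather than a literal string, the main reason these
# members are typed "any". The expression itself is a placeholder.
from azure.mgmt.datafactory.models import SparkSource

spark_src = SparkSource(
    query={"value": "@concat('SELECT * FROM ', pipeline().parameters.table)",
           "type": "Expression"},
    query_timeout="01:00:00",
    disable_metrics_collection=True,
)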
+ + :param always_encrypted_akv_auth_type: Required. Sql always encrypted AKV authentication type. + Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipal", "ManagedIdentity". + :type always_encrypted_akv_auth_type: str or + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Azure Key Vault authentication. Type: string (or Expression with resultType string). + :type service_principal_id: any + :param service_principal_key: The key of the service principal used to authenticate against + Azure Key Vault. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'always_encrypted_akv_auth_type': {'required': True}, + } + + _attribute_map = { + 'always_encrypted_akv_auth_type': {'key': 'alwaysEncryptedAkvAuthType', 'type': 'str'}, + 'service_principal_id': {'key': 'servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'servicePrincipalKey', 'type': 'SecretBase'}, + } + + def __init__( + self, + *, + always_encrypted_akv_auth_type: Union[str, "SqlAlwaysEncryptedAkvAuthType"], + service_principal_id: Optional[Any] = None, + service_principal_key: Optional["SecretBase"] = None, + **kwargs + ): + super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) + self.always_encrypted_akv_auth_type = always_encrypted_akv_auth_type + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + + class SqlDWSink(CopySink): """A copy activity SQL Data Warehouse sink. @@ -35832,41 +37213,44 @@ class SqlDWSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). 
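# Illustrative sketch (not from this patch) of the new
# SqlAlwaysEncryptedProperties model defined above. With "ManagedIdentity" the
# service-principal fields stay unset; the "ServicePrincipal" flavor would also
# populate service_principal_id and service_principal_key.
from azure.mgmt.datafactory.models import SqlAlwaysEncryptedProperties

ae = SqlAlwaysEncryptedProperties(always_encrypted_akv_auth_type="ManagedIdentity")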
- :type allow_poly_base: object + :type allow_poly_base: any :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). - :type allow_copy_command: object + :type allow_copy_command: any :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. :type copy_command_settings: ~azure.mgmt.datafactory.models.DWCopyCommandSettings :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object + :type table_option: any """ _validation = { @@ -35881,6 +37265,7 @@ class SqlDWSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, @@ -35892,21 +37277,22 @@ class SqlDWSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - pre_copy_script: Optional[object] = None, - allow_poly_base: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, + allow_poly_base: Optional[Any] = None, poly_base_settings: Optional["PolybaseSettings"] = None, - allow_copy_command: Optional[object] = None, + allow_copy_command: Optional[Any] = None, copy_command_settings: Optional["DWCopyCommandSettings"] = None, - table_option: Optional[object] = None, + table_option: Optional[Any] = None, **kwargs ): - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base @@ -35923,38 +37309,41 @@ class SqlDWSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
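# Illustrative sketch (not from this patch): a SqlDWSink using COPY-command
# ingestion with automatic table creation. The PolyBase/Copy Command settings
# objects are omitted to keep the sketch minimal.
from azure.mgmt.datafactory.models import SqlDWSink

dw_sink = SqlDWSink(
    allow_copy_command=True,
    table_option="autoCreate",  # currently the only supported value
    disable_metrics_collection=False,
)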
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. - :type stored_procedure_parameters: object + :type stored_procedure_parameters: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
:type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ @@ -35969,6 +37358,7 @@ class SqlDWSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -35981,20 +37371,21 @@ class SqlDWSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - sql_reader_query: Optional[object] = None, - sql_reader_stored_procedure_name: Optional[object] = None, - stored_procedure_parameters: Optional[object] = None, - partition_option: Optional[object] = None, + sql_reader_query: Optional[Any] = None, + sql_reader_stored_procedure_name: Optional[Any] = None, + stored_procedure_parameters: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36010,42 +37401,45 @@ class SqlMISink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. 
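# Illustrative sketch (not from this patch): a dynamic-range partitioned
# SqlDWSource. Per the docstrings above, the bounds set the partition stride
# only and never filter rows; the query, column, and bound values are
# placeholders.
from azure.mgmt.datafactory.models import SqlDWSource, SqlPartitionSettings

dw_src = SqlDWSource(
    sql_reader_query="SELECT * FROM dbo.FactSales",  # placeholder query
    partition_option="DynamicRange",
    partition_settings=SqlPartitionSettings(
        partition_column_name="SaleId",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)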
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object + :type sql_writer_stored_procedure_name: any :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type sql_writer_table_type: object + :type sql_writer_table_type: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object + :type stored_procedure_table_type_parameter_name: any :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
- :type table_option: object + :type table_option: any """ _validation = { @@ -36060,6 +37454,7 @@ class SqlMISink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -36071,21 +37466,22 @@ class SqlMISink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - sql_writer_stored_procedure_name: Optional[object] = None, - sql_writer_table_type: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + sql_writer_stored_procedure_name: Optional[Any] = None, + sql_writer_table_type: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, - stored_procedure_table_type_parameter_name: Optional[object] = None, - table_option: Optional[object] = None, + stored_procedure_table_type_parameter_name: Optional[Any] = None, + table_option: Optional[Any] = None, **kwargs ): - super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -36102,39 +37498,42 @@ class SqlMISource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
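# Illustrative sketch (not from this patch): a SqlMISink writing through a
# stored procedure. Procedure, table-type, and parameter names are
# placeholders, and StoredProcedureParameter (value + type) is assumed from
# elsewhere in these models, following the "{value, type}" docstring example.
from azure.mgmt.datafactory.models import SqlMISink, StoredProcedureParameter

mi_sink = SqlMISink(
    sql_writer_stored_procedure_name="dbo.spUpsertOrders",
    sql_writer_table_type="OrdersTableType",
    stored_procedure_table_type_parameter_name="Orders",
    stored_procedure_parameters={"BatchId": StoredProcedureParameter(value="1", type="Int")},
)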
- :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object + :type produce_additional_types: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
:type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ @@ -36149,6 +37548,7 @@ class SqlMISource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -36162,21 +37562,22 @@ class SqlMISource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - sql_reader_query: Optional[object] = None, - sql_reader_stored_procedure_name: Optional[object] = None, + sql_reader_query: Optional[Any] = None, + sql_reader_stored_procedure_name: Optional[Any] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, - produce_additional_types: Optional[object] = None, - partition_option: Optional[object] = None, + produce_additional_types: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlMISource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36190,19 +37591,20 @@ class SqlPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. :param partition_column_name: The name of the column in integer or datetime type that will be - used for proceeding partitioning. If not specified, the primary key of the table is auto- - detected and used as the partition column. Type: string (or Expression with resultType string). - :type partition_column_name: object + used for proceeding partitioning. If not specified, the primary key of the table is + auto-detected and used as the partition column. Type: string (or Expression with resultType + string). + :type partition_column_name: any :param partition_upper_bound: The maximum value of the partition column for partition range splitting. 
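# Illustrative sketch (not from this patch): a SqlMISource reading via a
# stored procedure instead of a query (the docstring above notes the two are
# mutually exclusive). Names are placeholders; StoredProcedureParameter is
# assumed from elsewhere in these models.
from azure.mgmt.datafactory.models import SqlMISource, StoredProcedureParameter

mi_src = SqlMISource(
    sql_reader_stored_procedure_name="dbo.spGetOrders",  # placeholder
    stored_procedure_parameters={
        "From": StoredProcedureParameter(value="2021-01-01", type="Date"),
    },
)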
This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). - :type partition_lower_bound: object + :type partition_lower_bound: any """ _attribute_map = { @@ -36214,9 +37616,9 @@ class SqlPartitionSettings(msrest.serialization.Model): def __init__( self, *, - partition_column_name: Optional[object] = None, - partition_upper_bound: Optional[object] = None, - partition_lower_bound: Optional[object] = None, + partition_column_name: Optional[Any] = None, + partition_upper_bound: Optional[Any] = None, + partition_lower_bound: Optional[Any] = None, **kwargs ): super(SqlPartitionSettings, self).__init__(**kwargs) @@ -36232,7 +37634,7 @@ class SqlServerLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -36242,19 +37644,21 @@ class SqlServerLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). - :type user_name: object + :type user_name: any :param password: The on-premises Windows authentication password. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any + :param always_encrypted_settings: Sql always encrypted properties. 
+ :type always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -36273,20 +37677,22 @@ class SqlServerLinkedService(LinkedService): 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, + connection_string: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - user_name: Optional[object] = None, + annotations: Optional[List[Any]] = None, + user_name: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, + always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None, **kwargs ): super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -36295,6 +37701,7 @@ def __init__( self.user_name = user_name self.password = password self.encrypted_credential = encrypted_credential + self.always_encrypted_settings = always_encrypted_settings class SqlServerSink(CopySink): @@ -36304,42 +37711,45 @@ class SqlServerSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object + :type sql_writer_stored_procedure_name: any :param sql_writer_table_type: SQL writer table type. 
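# Illustrative sketch (not from this patch): wiring the new
# always_encrypted_settings into a SqlServerLinkedService. The connection
# string, app id, and secret are placeholders; SecureString is assumed from
# elsewhere in these models as one of the accepted credential shapes.
from azure.mgmt.datafactory.models import (
    SecureString,
    SqlAlwaysEncryptedProperties,
    SqlServerLinkedService,
)

sql_ls = SqlServerLinkedService(
    connection_string=SecureString(value="Server=myserver;Database=mydb;"),  # placeholder
    always_encrypted_settings=SqlAlwaysEncryptedProperties(
        always_encrypted_akv_auth_type="ServicePrincipal",
        service_principal_id="00000000-0000-0000-0000-000000000000",  # placeholder app id
        service_principal_key=SecureString(value="placeholder-secret"),
    ),
)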
Type: string (or Expression with resultType string). - :type sql_writer_table_type: object + :type sql_writer_table_type: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object + :type stored_procedure_table_type_parameter_name: any :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object + :type table_option: any """ _validation = { @@ -36354,6 +37764,7 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -36365,21 +37776,22 @@ class SqlServerSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - sql_writer_stored_procedure_name: Optional[object] = None, - sql_writer_table_type: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + sql_writer_stored_procedure_name: Optional[Any] = None, + sql_writer_table_type: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, - stored_procedure_table_type_parameter_name: Optional[object] = None, - table_option: Optional[object] = None, + stored_procedure_table_type_parameter_name: Optional[Any] = None, + table_option: Optional[Any] = None, **kwargs ): - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 
'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -36396,39 +37808,42 @@ class SqlServerSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object + :type produce_additional_types: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
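A hedged sketch of the SqlServerSink defined above, exercising the newly added disable_metrics_collection flag; all values are placeholders:

from azure.mgmt.datafactory.models import SqlServerSink

sink = SqlServerSink(
    # 'autoCreate' is the only table_option value supported per the docstring.
    table_option="autoCreate",
    pre_copy_script="TRUNCATE TABLE dbo.Staging",  # placeholder script
    # Newly added in this change; defaults to false when omitted.
    disable_metrics_collection=True,
)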
:type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ @@ -36443,6 +37858,7 @@ class SqlServerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -36456,21 +37872,22 @@ class SqlServerSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - sql_reader_query: Optional[object] = None, - sql_reader_stored_procedure_name: Optional[object] = None, + sql_reader_query: Optional[Any] = None, + sql_reader_stored_procedure_name: Optional[Any] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, - produce_additional_types: Optional[object] = None, - partition_option: Optional[object] = None, + produce_additional_types: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36487,7 +37904,7 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -36504,7 +37921,7 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with resultType string). - :type stored_procedure_name: object + :type stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. 
Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, @@ -36534,8 +37951,8 @@ def __init__( self, *, name: str, - stored_procedure_name: object, - additional_properties: Optional[Dict[str, object]] = None, + stored_procedure_name: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -36557,35 +37974,35 @@ class SqlServerTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any :param table: The table name of the SQL Server dataset. Type: string (or Expression with resultType string). 
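A minimal sketch wiring the docstring's parameter example into the activity; the linked_service_name argument comes from the ExecutionActivity base class (elided in this hunk) and the reference name is a placeholder:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SqlServerStoredProcedureActivity,
    StoredProcedureParameter,
)

activity = SqlServerStoredProcedureActivity(
    name="RefreshAggregates",
    linked_service_name=LinkedServiceReference(reference_name="SqlServerLS"),  # placeholder
    stored_procedure_name="usp_RefreshAggregates",  # placeholder
    # Mirrors the documented example {Parameter1: {value: "1", type: "int"}}.
    stored_procedure_parameters={
        "Parameter1": StoredProcedureParameter(value="1", type="Int"),
    },
)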
- :type table: object + :type table: any """ _validation = { @@ -36612,16 +38029,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + table_name: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -36638,42 +38055,45 @@ class SqlSink(CopySink): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy sink type.Constant filled by server. :type type: str :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type write_batch_size: object + :type write_batch_size: any :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object + :type write_batch_timeout: any :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). - :type sink_retry_count: object + :type sink_retry_count: any :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object + :type sink_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object + :type sql_writer_stored_procedure_name: any :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type sql_writer_table_type: object + :type sql_writer_table_type: any :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). - :type pre_copy_script: object + :type pre_copy_script: any :param stored_procedure_parameters: SQL stored procedure parameters. 
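Since table_name is slated for retirement in favor of schema + table, a sketch of the preferred shape (all names are placeholders):

from azure.mgmt.datafactory.models import LinkedServiceReference, SqlServerTableDataset

dataset = SqlServerTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="SqlServerLS"),  # placeholder
    # Prefer schema + table over the retired table_name property.
    schema_type_properties_schema="dbo",
    table="Customers",
)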
:type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). - :type stored_procedure_table_type_parameter_name: object + :type stored_procedure_table_type_parameter_name: any :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). - :type table_option: object + :type table_option: any """ _validation = { @@ -36688,6 +38108,7 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -36699,21 +38120,22 @@ class SqlSink(CopySink): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - sql_writer_stored_procedure_name: Optional[object] = None, - sql_writer_table_type: Optional[object] = None, - pre_copy_script: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + write_batch_size: Optional[Any] = None, + write_batch_timeout: Optional[Any] = None, + sink_retry_count: Optional[Any] = None, + sink_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + sql_writer_stored_procedure_name: Optional[Any] = None, + sql_writer_table_type: Optional[Any] = None, + pre_copy_script: Optional[Any] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, - stored_procedure_table_type_parameter_name: Optional[object] = None, - table_option: Optional[object] = None, + stored_procedure_table_type_parameter_name: Optional[Any] = None, + table_option: Optional[Any] = None, **kwargs ): - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -36730,30 +38152,33 @@ class SqlSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
- :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). - :type sql_reader_query: object + :type sql_reader_query: any :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object + :type sql_reader_stored_procedure_name: any :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, @@ -36761,10 +38186,10 @@ class SqlSource(TabularSource): :param isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). - :type isolation_level: object + :type isolation_level: any :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
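Combining the isolation and partitioning options documented above into one hedged sketch; the query and bounds are placeholders (expressions are equally valid):

from azure.mgmt.datafactory.models import SqlPartitionSettings, SqlSource

source = SqlSource(
    sql_reader_query="SELECT * FROM dbo.Orders",  # placeholder
    isolation_level="Snapshot",  # default is ReadCommitted
    partition_option="DynamicRange",
    # Bounds steer the partition stride only; they do not filter rows.
    partition_settings=SqlPartitionSettings(
        partition_column_name="OrderId",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)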
:type partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings """ @@ -36779,6 +38204,7 @@ class SqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -36792,21 +38218,22 @@ class SqlSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - sql_reader_query: Optional[object] = None, - sql_reader_stored_procedure_name: Optional[object] = None, + sql_reader_query: Optional[Any] = None, + sql_reader_stored_procedure_name: Optional[Any] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, - isolation_level: Optional[object] = None, - partition_option: Optional[object] = None, + isolation_level: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36823,7 +38250,7 @@ class SquareLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -36833,33 +38260,33 @@ class SquareLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to Square. 
It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param host: The URL of the Square instance. (i.e. mystore.mysquare.com). - :type host: object + :type host: any :param client_id: The client ID associated with your Square application. - :type client_id: object + :type client_id: any :param client_secret: The client secret associated with your Square application. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param redirect_uri: The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500). - :type redirect_uri: object + :type redirect_uri: any :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -36887,20 +38314,20 @@ class SquareLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_properties: Optional[object] = None, - host: Optional[object] = None, - client_id: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_properties: Optional[Any] = None, + host: Optional[Any] = None, + client_id: Optional[Any] = None, client_secret: Optional["SecretBase"] = None, - redirect_uri: Optional[object] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + redirect_uri: Optional[Any] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -36923,28 +38350,28 @@ class SquareObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. 
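The two mutually exclusive Square configuration styles described above, sketched with invented values; the key names inside connection_properties are an assumption:

from azure.mgmt.datafactory.models import SecureString, SquareLinkedService

# Style 1: individual properties.
square_ls = SquareLinkedService(
    host="mystore.mysquare.com",
    client_id="my-client-id",  # placeholder
    client_secret=SecureString(value="<client-secret>"),
    redirect_uri="http://localhost:2500",
)

# Style 2: one connection_properties object; mutually exclusive with style 1.
square_ls_alt = SquareLinkedService(
    connection_properties={"host": "mystore.mysquare.com", "clientId": "my-client-id"},  # assumed keys
)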
:type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -36969,14 +38396,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -36991,27 +38418,30 @@ class SquareSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -37024,6 +38454,7 @@ class SquareSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -37032,16 +38463,17 @@ def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SquareSource' # type: str self.query = query @@ -37052,9 +38484,9 @@ class SSISAccessCredential(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param domain: Required. Domain for Windows authentication. - :type domain: object + :type domain: any :param user_name: Required. UserName for Windows authentication. - :type user_name: object + :type user_name: any :param password: Required. Password for Windows authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -37074,8 +38506,8 @@ class SSISAccessCredential(msrest.serialization.Model): def __init__( self, *, - domain: object, - user_name: object, + domain: Any, + user_name: Any, password: "SecretBase", **kwargs ): @@ -37092,12 +38524,12 @@ class SSISChildPackage(msrest.serialization.Model): :param package_path: Required. Path for embedded child package. Type: string (or Expression with resultType string). - :type package_path: object + :type package_path: any :param package_name: Name for embedded child package. :type package_name: str :param package_content: Required.
Content for embedded child package. Type: string (or Expression with resultType string). - :type package_content: object + :type package_content: any :param package_last_modified_date: Last modified date for embedded child package. :type package_last_modified_date: str """ @@ -37117,8 +38549,8 @@ class SSISChildPackage(msrest.serialization.Model): def __init__( self, *, - package_path: object, - package_content: object, + package_path: Any, + package_content: Any, package_name: Optional[str] = None, package_last_modified_date: Optional[str] = None, **kwargs @@ -37270,9 +38702,9 @@ class SSISExecutionCredential(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param domain: Required. Domain for Windows authentication. - :type domain: object + :type domain: any :param user_name: Required. UserName for Windows authentication. - :type user_name: object + :type user_name: any :param password: Required. Password for Windows authentication. :type password: ~azure.mgmt.datafactory.models.SecureString """ @@ -37292,8 +38724,8 @@ class SSISExecutionCredential(msrest.serialization.Model): def __init__( self, *, - domain: object, - user_name: object, + domain: Any, + user_name: Any, password: "SecureString", **kwargs ): @@ -37310,7 +38742,7 @@ class SSISExecutionParameter(msrest.serialization.Model): :param value: Required. SSIS package execution parameter value. Type: string (or Expression with resultType string). - :type value: object + :type value: any """ _validation = { @@ -37324,7 +38756,7 @@ def __init__( self, *, - value: object, + value: Any, **kwargs ): super(SSISExecutionParameter, self).__init__(**kwargs) @@ -37377,7 +38809,7 @@ class SSISLogLocation(msrest.serialization.Model): :param log_path: Required. The SSIS package execution log path. Type: string (or Expression with resultType string). - :type log_path: object + :type log_path: any :param type: Required. The type of SSIS log location. Possible values include: "File". :type type: str or ~azure.mgmt.datafactory.models.SsisLogLocationType :param access_credential: The package execution log access credential. :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type log_refresh_interval: object + :type log_refresh_interval: any """ _validation = { @@ -37403,10 +38835,10 @@ class SSISLogLocation(msrest.serialization.Model): def __init__( self, *, - log_path: object, + log_path: Any, type: Union[str, "SsisLogLocationType"], access_credential: Optional["SSISAccessCredential"] = None, - log_refresh_interval: Optional[object] = None, + log_refresh_interval: Optional[Any] = None, **kwargs ): super(SSISLogLocation, self).__init__(**kwargs) @@ -37542,7 +38974,7 @@ class SSISPackageLocation(msrest.serialization.Model): :param package_path: The SSIS package path. Type: string (or Expression with resultType string). - :type package_path: object + :type package_path: any :param type: The type of SSIS package location. Possible values include: "SSISDB", "File", "InlinePackage", "PackageStore".
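Tying the SSIS credential and log-location models above together in a hedged sketch; the UNC path and account values are placeholders, and the refresh interval follows the documented time pattern:

from azure.mgmt.datafactory.models import SecureString, SSISAccessCredential, SSISLogLocation

log_location = SSISLogLocation(
    log_path="\\\\fileshare\\ssis\\logs",  # placeholder UNC path
    type="File",  # the only documented SsisLogLocationType value
    access_credential=SSISAccessCredential(
        domain="CORP",
        user_name="svc-ssis",
        password=SecureString(value="<password>"),
    ),
    log_refresh_interval="00:05:00",  # matches the documented 5-minute default
)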
:type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType @@ -37552,14 +38984,14 @@ class SSISPackageLocation(msrest.serialization.Model): :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :param configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). - :type configuration_path: object + :type configuration_path: any :param configuration_access_credential: The configuration file access credential. :type configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :param package_name: The package name. :type package_name: str :param package_content: The embedded package content. Type: string (or Expression with resultType string). - :type package_content: object + :type package_content: any :param package_last_modified_date: The embedded package last modified date. :type package_last_modified_date: str :param child_packages: The embedded child package list. @@ -37582,14 +39014,14 @@ class SSISPackageLocation(msrest.serialization.Model): def __init__( self, *, - package_path: Optional[object] = None, + package_path: Optional[Any] = None, type: Optional[Union[str, "SsisPackageLocationType"]] = None, package_password: Optional["SecretBase"] = None, access_credential: Optional["SSISAccessCredential"] = None, - configuration_path: Optional[object] = None, + configuration_path: Optional[Any] = None, configuration_access_credential: Optional["SSISAccessCredential"] = None, package_name: Optional[str] = None, - package_content: Optional[object] = None, + package_content: Optional[Any] = None, package_last_modified_date: Optional[str] = None, child_packages: Optional[List["SSISChildPackage"]] = None, **kwargs @@ -37749,7 +39181,7 @@ class SSISPropertyOverride(msrest.serialization.Model): :param value: Required. SSIS package property override value. Type: string (or Expression with resultType string). - :type value: object + :type value: any :param is_sensitive: Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true. :type is_sensitive: bool @@ -37767,7 +39199,7 @@ class SSISPropertyOverride(msrest.serialization.Model): def __init__( self, *, - value: object, + value: Any, is_sensitive: Optional[bool] = None, **kwargs ): @@ -37834,15 +39266,15 @@ class StagingSettings(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param linked_service_name: Required. Staging linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing the interim data. Type: string (or Expression with resultType string). - :type path: object + :type path: any :param enable_compression: Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). 
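A sketch of an SSISDB package location with a package password; the folder/project/package path shape is illustrative, not mandated by this hunk:

from azure.mgmt.datafactory.models import SecureString, SSISPackageLocation

package_location = SSISPackageLocation(
    package_path="MyFolder/MyProject/MyPackage.dtsx",  # illustrative SSISDB path
    type="SSISDB",
    package_password=SecureString(value="<package-password>"),
)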
- :type enable_compression: object + :type enable_compression: any """ _validation = { @@ -37860,9 +39292,9 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - path: Optional[object] = None, - enable_compression: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + path: Optional[Any] = None, + enable_compression: Optional[Any] = None, **kwargs ): super(StagingSettings, self).__init__(**kwargs) @@ -37877,7 +39309,7 @@ class StoredProcedureParameter(msrest.serialization.Model): :param value: Stored procedure parameter value. Type: string (or Expression with resultType string). - :type value: object + :type value: any :param type: Stored procedure parameter type. Possible values include: "String", "Int", "Int64", "Decimal", "Guid", "Boolean", "Date". :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType @@ -37891,7 +39323,7 @@ class StoredProcedureParameter(msrest.serialization.Model): def __init__( self, *, - value: Optional[object] = None, + value: Optional[Any] = None, type: Optional[Union[str, "StoredProcedureParameterType"]] = None, **kwargs ): @@ -37900,14 +39332,14 @@ def __init__( self.type = type -class SwitchActivity(Activity): +class SwitchActivity(ControlActivity): """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -37953,7 +39385,7 @@ def __init__( *, name: str, on: "Expression", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -38001,7 +39433,7 @@ class SybaseLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -38011,27 +39443,27 @@ class SybaseLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param server: Required. Server name for connection. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param schema: Schema name for connection. Type: string (or Expression with resultType string). - :type schema: object + :type schema: any :param authentication_type: AuthenticationType to be used for connection. Possible values include: "Basic", "Windows". 
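A minimal staging configuration per the StagingSettings model above; the linked service reference and path are placeholders:

from azure.mgmt.datafactory.models import LinkedServiceReference, StagingSettings

staging = StagingSettings(
    linked_service_name=LinkedServiceReference(reference_name="StagingBlobLS"),  # placeholder
    path="staging-container/interim",  # placeholder
    enable_compression=True,  # defaults to false when omitted
)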
:type authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType :param username: Username for authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -38059,18 +39491,18 @@ class SybaseLinkedService(LinkedService): def __init__( self, *, - server: object, - database: object, - additional_properties: Optional[Dict[str, object]] = None, + server: Any, + database: Any, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - schema: Optional[object] = None, + annotations: Optional[List[Any]] = None, + schema: Optional[Any] = None, authentication_type: Optional[Union[str, "SybaseAuthenticationType"]] = None, - username: Optional[object] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -38091,26 +39523,29 @@ class SybaseSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -38123,6 +39558,7 @@ class SybaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -38131,16 +39567,17 @@ class SybaseSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SybaseSource' # type: str self.query = query @@ -38152,28 +39589,28 @@ class SybaseTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The Sybase table name. 
Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -38198,14 +39635,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -38220,33 +39657,33 @@ class TabularTranslator(CopyTranslator): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy translator type.Constant filled by server. :type type: str :param column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. - :type column_mappings: object + :type column_mappings: any :param schema_mapping: The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. - :type schema_mapping: object + :type schema_mapping: any :param collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). - :type collection_reference: object + :type collection_reference: any :param map_complex_values_to_string: Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). - :type map_complex_values_to_string: object + :type map_complex_values_to_string: any :param mappings: Column mappings with logical types. Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). - :type mappings: object + :type mappings: any :param type_conversion: Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with resultType boolean). - :type type_conversion: object + :type type_conversion: any :param type_conversion_settings: Type conversion settings. 
:type type_conversion_settings: ~azure.mgmt.datafactory.models.TypeConversionSettings """ @@ -38270,13 +39707,13 @@ class TabularTranslator(CopyTranslator): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - column_mappings: Optional[object] = None, - schema_mapping: Optional[object] = None, - collection_reference: Optional[object] = None, - map_complex_values_to_string: Optional[object] = None, - mappings: Optional[object] = None, - type_conversion: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + column_mappings: Optional[Any] = None, + schema_mapping: Optional[Any] = None, + collection_reference: Optional[Any] = None, + map_complex_values_to_string: Optional[Any] = None, + mappings: Optional[Any] = None, + type_conversion: Optional[Any] = None, type_conversion_settings: Optional["TypeConversionSettings"] = None, **kwargs ): @@ -38298,12 +39735,12 @@ class TarGZipReadSettings(CompressionReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The Compression setting type.Constant filled by server. :type type: str :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_compression_file_name_as_folder: object + :type preserve_compression_file_name_as_folder: any """ _validation = { @@ -38319,8 +39756,8 @@ class TarGZipReadSettings(CompressionReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - preserve_compression_file_name_as_folder: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + preserve_compression_file_name_as_folder: Optional[Any] = None, **kwargs ): super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -38335,12 +39772,12 @@ class TarReadSettings(CompressionReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The Compression setting type.Constant filled by server. :type type: str :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_compression_file_name_as_folder: object + :type preserve_compression_file_name_as_folder: any """ _validation = { @@ -38356,8 +39793,8 @@ class TarReadSettings(CompressionReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - preserve_compression_file_name_as_folder: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + preserve_compression_file_name_as_folder: Optional[Any] = None, **kwargs ): super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -38372,7 +39809,7 @@ class TeradataLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. 
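The tabular-to-tabular mapping example from the TabularTranslator docstring above, restated as a constructor call:

from azure.mgmt.datafactory.models import TabularTranslator

translator = TabularTranslator(
    # Taken from the documented Tabular->tabular example.
    mappings=[
        {"source": {"name": "CustomerName", "type": "String"},
         "sink": {"name": "ClientName", "type": "String"}},
        {"source": {"name": "CustomerAddress", "type": "String"},
         "sink": {"name": "ClientAddress", "type": "String"}},
    ],
    type_conversion=True,  # enables the advanced type conversion feature
)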
@@ -38382,24 +39819,24 @@ class TeradataLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object + :type connection_string: any :param server: Server name for connection. Type: string (or Expression with resultType string). - :type server: object + :type server: any :param authentication_type: AuthenticationType to be used for connection. Possible values include: "Basic", "Windows". :type authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType :param username: Username for authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -38424,17 +39861,17 @@ class TeradataLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - server: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, + server: Optional[Any] = None, authentication_type: Optional[Union[str, "TeradataAuthenticationType"]] = None, - username: Optional[object] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -38452,15 +39889,15 @@ class TeradataPartitionSettings(msrest.serialization.Model): :param partition_column_name: The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). - :type partition_column_name: object + :type partition_column_name: any :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). - :type partition_upper_bound: object + :type partition_upper_bound: any :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
- :type partition_lower_bound: object + :type partition_lower_bound: any """ _attribute_map = { @@ -38472,9 +39909,9 @@ class TeradataPartitionSettings(msrest.serialization.Model): def __init__( self, *, - partition_column_name: Optional[object] = None, - partition_upper_bound: Optional[object] = None, - partition_lower_bound: Optional[object] = None, + partition_column_name: Optional[Any] = None, + partition_upper_bound: Optional[Any] = None, + partition_lower_bound: Optional[Any] = None, **kwargs ): super(TeradataPartitionSettings, self).__init__(**kwargs) @@ -38490,29 +39927,32 @@ class TeradataSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Teradata query. Type: string (or Expression with resultType string). - :type query: object + :type query: any :param partition_option: The partition mechanism that will be used for Teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :type partition_option: object + :type partition_option: any :param partition_settings: The settings that will be leveraged for Teradata source partitioning.
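A minimal sketch of the partitioned read documented above (the table, column, and bounds are placeholders): a TeradataSource that splits the load across dynamic ranges of a numeric column, leaving the new disable_metrics_collection flag at its default.

from azure.mgmt.datafactory.models import TeradataPartitionSettings, TeradataSource

# Parallel range read over OrderId; the bounds are strings because the
# model accepts expressions as well as literals.
source = TeradataSource(
    query="SELECT * FROM Sales.Orders",
    partition_option="DynamicRange",
    partition_settings=TeradataPartitionSettings(
        partition_column_name="OrderId",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)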
:type partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings @@ -38528,6 +39968,7 @@ class TeradataSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -38538,18 +39979,19 @@ class TeradataSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, - partition_option: Optional[object] = None, + query: Optional[Any] = None, + partition_option: Optional[Any] = None, partition_settings: Optional["TeradataPartitionSettings"] = None, **kwargs ): - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'TeradataSource' # type: str self.query = query self.partition_option = partition_option @@ -38563,31 +40005,31 @@ class TeradataTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param database: The database name of Teradata. Type: string (or Expression with resultType string). - :type database: object + :type database: any :param table: The table name of Teradata. Type: string (or Expression with resultType string). - :type table: object + :type table: any """ _validation = { @@ -38613,15 +40055,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - database: Optional[object] = None, - table: Optional[object] = None, + database: Optional[Any] = None, + table: Optional[Any] = None, **kwargs ): super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -38637,40 +40079,40 @@ class TextFormat(DatasetStorageFormat): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str :param serializer: Serializer. Type: string (or Expression with resultType string). - :type serializer: object + :type serializer: any :param deserializer: Deserializer. Type: string (or Expression with resultType string). - :type deserializer: object + :type deserializer: any :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). - :type column_delimiter: object + :type column_delimiter: any :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: object + :type row_delimiter: any :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: object + :type escape_char: any :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: object + :type quote_char: any :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :type null_value: any :param encoding_name: The code page name of the preferred encoding. If missing, the default value is "utf-8", unless BOM denotes another Unicode encoding. Refer to the "Name" column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: object + :type encoding_name: any :param treat_empty_as_null: Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean).
- :type treat_empty_as_null: object + :type treat_empty_as_null: any :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). - :type skip_line_count: object + :type skip_line_count: any :param first_row_as_header: When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object + :type first_row_as_header: any """ _validation = { @@ -38696,18 +40138,18 @@ class TextFormat(DatasetStorageFormat): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - serializer: Optional[object] = None, - deserializer: Optional[object] = None, - column_delimiter: Optional[object] = None, - row_delimiter: Optional[object] = None, - escape_char: Optional[object] = None, - quote_char: Optional[object] = None, - null_value: Optional[object] = None, - encoding_name: Optional[object] = None, - treat_empty_as_null: Optional[object] = None, - skip_line_count: Optional[object] = None, - first_row_as_header: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + serializer: Optional[Any] = None, + deserializer: Optional[Any] = None, + column_delimiter: Optional[Any] = None, + row_delimiter: Optional[Any] = None, + escape_char: Optional[Any] = None, + quote_char: Optional[Any] = None, + null_value: Optional[Any] = None, + encoding_name: Optional[Any] = None, + treat_empty_as_null: Optional[Any] = None, + skip_line_count: Optional[Any] = None, + first_row_as_header: Optional[Any] = None, **kwargs ): super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) @@ -38828,7 +40270,7 @@ class TriggerPipelineReference(msrest.serialization.Model): :param pipeline_reference: Pipeline reference. :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference :param parameters: Pipeline parameters. - :type parameters: dict[str, object] + :type parameters: dict[str, any] """ _attribute_map = { @@ -38840,7 +40282,7 @@ def __init__( self, *, pipeline_reference: Optional["PipelineReference"] = None, - parameters: Optional[Dict[str, object]] = None, + parameters: Optional[Dict[str, Any]] = None, **kwargs ): super(TriggerPipelineReference, self).__init__(**kwargs) @@ -38888,7 +40330,7 @@ class TriggerReference(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. Trigger reference type. Default value: "TriggerReference". + :ivar type: Trigger reference type. Has constant value: "TriggerReference". :vartype type: str :param reference_name: Required. Reference trigger name. :type reference_name: str @@ -38968,7 +40410,7 @@ class TriggerRun(msrest.serialization.Model): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :ivar trigger_run_id: Trigger run id. :vartype trigger_run_id: str :ivar trigger_name: Trigger name. :vartype trigger_name: str :ivar trigger_type: Trigger type. :vartype trigger_type: str :ivar trigger_run_timestamp: Trigger run start time. :vartype trigger_run_timestamp: ~datetime.datetime :ivar status: Trigger run status. Possible values include: "Succeeded", "Failed", "Inprogress". :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus :ivar message: Trigger error message. :vartype message: str :ivar properties: List of property name and value related to trigger run. Those properties will not be changed. :vartype properties: dict[str, str] :ivar triggered_pipelines: List of pipeline name and run Id triggered by the trigger run. :vartype triggered_pipelines: dict[str, str] :ivar run_dimension: Run dimension for which trigger was fired. :vartype run_dimension: dict[str, str] :ivar dependency_status: Status of the upstream pipelines.
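For the legacy TextFormat completed above, a usage sketch (illustrative values only):

from azure.mgmt.datafactory.models import TextFormat

# Pipe-delimited text with a header row, UTF-8 encoded; empty columns
# are read back as nulls.
text_format = TextFormat(
    column_delimiter="|",
    row_delimiter="\n",
    encoding_name="UTF-8",
    first_row_as_header=True,
    treat_empty_as_null=True,
)

TextFormat is the legacy storage-format model; DelimitedText datasets are the newer equivalent for the same kind of data.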
- :vartype dependency_status: dict[str, object] + :vartype dependency_status: dict[str, any] """ _validation = { @@ -39022,7 +40464,7 @@ class TriggerRun(msrest.serialization.Model): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, **kwargs ): super(TriggerRun, self).__init__(**kwargs) @@ -39112,7 +40554,7 @@ class TumblingWindowTrigger(Trigger): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Trigger type.Constant filled by server. :type type: str :param description: Trigger description. @@ -39121,7 +40563,7 @@ class TumblingWindowTrigger(Trigger): called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :type annotations: list[any] :param pipeline: Required. Pipeline for which runs are created when an event is fired for trigger window that is ready. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference @@ -39140,7 +40582,7 @@ class TumblingWindowTrigger(Trigger): :param delay: Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: object + :type delay: any :param max_concurrency: Required. The max number of parallel time windows (ready for execution) for which a new run is triggered. :type max_concurrency: int @@ -39186,11 +40628,11 @@ def __init__( interval: int, start_time: datetime.datetime, max_concurrency: int, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, end_time: Optional[datetime.datetime] = None, - delay: Optional[object] = None, + delay: Optional[Any] = None, retry_policy: Optional["RetryPolicy"] = None, depends_on: Optional[List["DependencyReference"]] = None, **kwargs @@ -39258,22 +40700,22 @@ class TypeConversionSettings(msrest.serialization.Model): :param allow_data_truncation: Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). - :type allow_data_truncation: object + :type allow_data_truncation: any :param treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). - :type treat_boolean_as_number: object + :type treat_boolean_as_number: any :param date_time_format: The format for DateTime values. Type: string (or Expression with resultType string). - :type date_time_format: object + :type date_time_format: any :param date_time_offset_format: The format for DateTimeOffset values. Type: string (or Expression with resultType string). - :type date_time_offset_format: object + :type date_time_offset_format: any :param time_span_format: The format for TimeSpan values. Type: string (or Expression with resultType string). - :type time_span_format: object + :type time_span_format: any :param culture: The culture used to convert data from/to string. 
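A sketch of wiring the tumbling window trigger above to a pipeline (the pipeline name and window parameter are placeholders; frequency and interval belong to the same model even though frequency's line falls outside the hunks shown here):

import datetime

from azure.mgmt.datafactory.models import (
    PipelineReference,
    TriggerPipelineReference,
    TumblingWindowTrigger,
)

# Hourly tumbling windows, at most four windows running in parallel.
trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name="CopyPipeline"),
        parameters={"windowStart": "@trigger().outputs.windowStartTime"},
    ),
    frequency="Hour",
    interval=1,
    start_time=datetime.datetime(2021, 7, 1),
    max_concurrency=4,
)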
Type: string (or Expression with resultType string). - :type culture: object + :type culture: any """ _attribute_map = { @@ -39288,12 +40730,12 @@ class TypeConversionSettings(msrest.serialization.Model): def __init__( self, *, - allow_data_truncation: Optional[object] = None, - treat_boolean_as_number: Optional[object] = None, - date_time_format: Optional[object] = None, - date_time_offset_format: Optional[object] = None, - time_span_format: Optional[object] = None, - culture: Optional[object] = None, + allow_data_truncation: Optional[Any] = None, + treat_boolean_as_number: Optional[Any] = None, + date_time_format: Optional[Any] = None, + date_time_offset_format: Optional[Any] = None, + time_span_format: Optional[Any] = None, + culture: Optional[Any] = None, **kwargs ): super(TypeConversionSettings, self).__init__(**kwargs) @@ -39305,14 +40747,14 @@ def __init__( self.culture = culture -class UntilActivity(Activity): +class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression evaluates to true or timeout is reached, whichever is earlier. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -39331,7 +40773,7 @@ class UntilActivity(Activity): Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object + :type timeout: any :param activities: Required. List of activities to execute. :type activities: list[~azure.mgmt.datafactory.models.Activity] """ @@ -39361,11 +40803,11 @@ def __init__( self, *, name: str, expression: "Expression", activities: List["Activity"], - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, - timeout: Optional[object] = None, + timeout: Optional[Any] = None, **kwargs ): super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) @@ -39484,7 +40926,7 @@ class UserProperty(msrest.serialization.Model): :type name: str :param value: Required. User property value. Type: string (or Expression with resultType string). - :type value: object + :type value: any """ _validation = { @@ -39501,7 +40943,7 @@ def __init__( self, *, name: str, - value: object, + value: Any, **kwargs ): super(UserProperty, self).__init__(**kwargs) @@ -39509,14 +40951,14 @@ def __init__( self.value = value -class ValidationActivity(Activity): +class ValidationActivity(ControlActivity): """This activity verifies that an external resource exists. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required.
Type of activity.Constant filled by server. @@ -39531,17 +40973,17 @@ class ValidationActivity(Activity): it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object + :type timeout: any :param sleep: A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). - :type sleep: object + :type sleep: any :param minimum_size: Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). - :type minimum_size: object + :type minimum_size: any :param child_items: Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). - :type child_items: object + :type child_items: any :param dataset: Required. Validation activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ @@ -39571,14 +41013,14 @@ def __init__( *, name: str, dataset: "DatasetReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, - timeout: Optional[object] = None, - sleep: Optional[object] = None, - minimum_size: Optional[object] = None, - child_items: Optional[object] = None, + timeout: Optional[Any] = None, + sleep: Optional[Any] = None, + minimum_size: Optional[Any] = None, + child_items: Optional[Any] = None, **kwargs ): super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) @@ -39598,7 +41040,7 @@ class VariableSpecification(msrest.serialization.Model): :param type: Required. Variable type. Possible values include: "String", "Bool", "Array". :type type: str or ~azure.mgmt.datafactory.models.VariableType :param default_value: Default value of variable. - :type default_value: object + :type default_value: any """ _validation = { @@ -39614,7 +41056,7 @@ def __init__( self, *, type: Union[str, "VariableType"], - default_value: Optional[object] = None, + default_value: Optional[Any] = None, **kwargs ): super(VariableSpecification, self).__init__(**kwargs) @@ -39629,7 +41071,7 @@ class VerticaLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -39639,16 +41081,16 @@ class VerticaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
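A usage sketch for the validation activity completed above (dataset name and thresholds are placeholders):

from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

# Block the pipeline until the referenced file exists and is non-empty,
# polling every 30 seconds for at most 10 minutes.
validation = ValidationActivity(
    name="WaitForInputFile",
    dataset=DatasetReference(reference_name="InputFileDataset"),
    timeout="0.00:10:00",
    sleep=30,
    minimum_size=1,
)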
- :type connection_string: object + :type connection_string: any :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -39670,14 +41112,14 @@ class VerticaLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_string: Optional[Any] = None, pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -39694,27 +41136,30 @@ class VerticaSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
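One way to instantiate the Vertica linked service above (a sketch; the Key Vault linked service name, secret name, and connection string are placeholders, and AzureKeyVaultSecretReference is assumed to come from the same models module):

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    VerticaLinkedService,
)

# Keep the password out of the connection string and pull it from Key Vault.
vertica = VerticaLinkedService(
    connection_string="Server=vertica01;Port=5433;Database=sales;UID=loader",
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="MyKeyVault"),
        secret_name="vertica-password",
    ),
)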
- :type query: object + :type query: any """ _validation = { @@ -39727,6 +41172,7 @@ class VerticaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -39735,16 +41181,17 @@ class VerticaSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'VerticaSource' # type: str self.query = query @@ -39756,35 +41203,35 @@ class VerticaTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: This property will be retired. 
Please consider using schema + table properties instead. - :type table_name: object + :type table_name: any :param table: The table name of the Vertica. Type: string (or Expression with resultType string). - :type table: object + :type table: any :param schema_type_properties_schema: The schema name of the Vertica. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type schema_type_properties_schema: any """ _validation = { @@ -39811,16 +41258,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + table_name: Optional[Any] = None, + table: Optional[Any] = None, + schema_type_properties_schema: Optional[Any] = None, **kwargs ): super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -39830,14 +41277,14 @@ def __init__( self.schema_type_properties_schema = schema_type_properties_schema -class WaitActivity(Activity): +class WaitActivity(ControlActivity): """This activity suspends pipeline execution for the specified interval. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -39849,7 +41296,7 @@ class WaitActivity(Activity): :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param wait_time_in_seconds: Required. Duration in seconds. - :type wait_time_in_seconds: object + :type wait_time_in_seconds: any """ _validation = { @@ -39872,8 +41319,8 @@ def __init__( self, *, name: str, - wait_time_in_seconds: object, - additional_properties: Optional[Dict[str, object]] = None, + wait_time_in_seconds: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, @@ -39891,7 +41338,7 @@ class WebActivity(ExecutionActivity): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -39911,14 +41358,14 @@ class WebActivity(ExecutionActivity): :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod :param url: Required. Web activity target endpoint and path. 
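The control-flow activities above compose; a sketch combining UntilActivity and WaitActivity (names, variable, and intervals are placeholders):

from azure.mgmt.datafactory.models import Expression, UntilActivity, WaitActivity

# Re-check a pipeline variable every 60 seconds, giving up after one hour.
until = UntilActivity(
    name="UntilReady",
    expression=Expression(value="@equals(variables('ready'), true)"),
    activities=[WaitActivity(name="Backoff", wait_time_in_seconds=60)],
    timeout="0.01:00:00",
)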
Type: string (or Expression with resultType string). - :type url: object + :type url: any :param headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: object + :type headers: any :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method. Type: string (or Expression with resultType string). - :type body: object + :type body: any :param authentication: Authentication method used for calling the endpoint. :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :param datasets: List of datasets passed to web endpoint. @@ -39960,15 +41407,15 @@ def __init__( self, *, name: str, method: Union[str, "WebActivityMethod"], - url: object, - additional_properties: Optional[Dict[str, object]] = None, + url: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - headers: Optional[object] = None, - body: Optional[object] = None, + headers: Optional[Any] = None, + body: Optional[Any] = None, authentication: Optional["WebActivityAuthentication"] = None, datasets: Optional[List["DatasetReference"]] = None, linked_services: Optional[List["LinkedServiceReference"]] = None, @@ -40000,16 +41447,16 @@ class WebActivityAuthentication(msrest.serialization.Model): :type pfx: ~azure.mgmt.datafactory.models.SecretBase :param username: Web activity authentication user name for basic authentication or ClientID when used for ServicePrincipal. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Password for the PFX file or basic authentication / Secret when used for ServicePrincipal. :type password: ~azure.mgmt.datafactory.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI Authentication. Type: string (or Expression with resultType string). - :type resource: object + :type resource: any :param user_tenant: TenantId for which Azure Auth token will be requested when using ServicePrincipal Authentication. Type: string (or Expression with resultType string). - :type user_tenant: object + :type user_tenant: any """ _validation = { @@ -40030,10 +41477,10 @@ def __init__( self, *, type: str, pfx: Optional["SecretBase"] = None, - username: Optional[object] = None, + username: Optional[Any] = None, password: Optional["SecretBase"] = None, - resource: Optional[object] = None, - user_tenant: Optional[object] = None, + resource: Optional[Any] = None, + user_tenant: Optional[Any] = None, **kwargs ): super(WebActivityAuthentication, self).__init__(**kwargs) @@ -40055,7 +41502,7 @@ class WebLinkedServiceTypeProperties(msrest.serialization.Model): :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate".
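A sketch of the activity/authentication pair documented above (the endpoint, payload, and resource URI are placeholders):

from azure.mgmt.datafactory.models import WebActivity, WebActivityAuthentication

# POST a status payload using a managed-identity (MSI) token.
web = WebActivity(
    name="NotifyStatus",
    method="POST",
    url="https://example.contoso.com/api/status",
    headers={"Content-Type": "application/json"},
    body='{"status": "done"}',
    authentication=WebActivityAuthentication(
        type="MSI",
        resource="https://management.azure.com/",
    ),
)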
@@ -40079,7 +41526,7 @@ class WebLinkedServiceTypeProperties(msrest.serialization.Model): def __init__( self, *, - url: object, + url: Any, **kwargs ): super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) @@ -40094,7 +41541,7 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". @@ -40114,7 +41561,7 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): def __init__( self, *, - url: object, + url: Any, **kwargs ): super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) @@ -40128,14 +41575,14 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". :type authentication_type: str or ~azure.mgmt.datafactory.models.WebAuthenticationType :param username: Required. User name for Basic authentication. Type: string (or Expression with resultType string). - :type username: object + :type username: any :param password: Required. The password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -40157,8 +41604,8 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): def __init__( self, *, - url: object, - username: object, + url: Any, + username: Any, password: "SecretBase", **kwargs ): @@ -40175,7 +41622,7 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). - :type url: object + :type url: any :param authentication_type: Required. Type of authentication used to connect to the web table source.Constant filled by server. Possible values include: "Basic", "Anonymous", "ClientCertificate". @@ -40203,7 +41650,7 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): def __init__( self, *, - url: object, + url: Any, pfx: "SecretBase", password: "SecretBase", **kwargs @@ -40214,14 +41661,14 @@ def __init__( self.password = password -class WebHookActivity(Activity): +class WebHookActivity(ControlActivity): """WebHook activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param name: Required. Activity name. :type name: str :param type: Required. Type of activity.Constant filled by server. @@ -40236,7 +41683,7 @@ class WebHookActivity(Activity): :type method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). - :type url: object + :type url: any :param timeout: The timeout within which the webhook should be called back. 
If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type timeout: str :param headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :type headers: object + :type headers: any :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method. Type: string (or Expression with resultType string). - :type body: object + :type body: any :param authentication: Authentication method used for calling the endpoint. :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :param report_status_on_call_back: When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). - :type report_status_on_call_back: object + :type report_status_on_call_back: any """ _validation = { @@ -40285,16 +41732,16 @@ def __init__( self, *, name: str, method: Union[str, "WebHookActivityMethod"], - url: object, - additional_properties: Optional[Dict[str, object]] = None, + url: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, timeout: Optional[str] = None, - headers: Optional[object] = None, - body: Optional[object] = None, + headers: Optional[Any] = None, + body: Optional[Any] = None, authentication: Optional["WebActivityAuthentication"] = None, - report_status_on_call_back: Optional[object] = None, + report_status_on_call_back: Optional[Any] = None, **kwargs ): super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) @@ -40315,7 +41762,7 @@ class WebLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -40325,7 +41772,7 @@ class WebLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param type_properties: Required. Web linked service properties.
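A usage sketch for the webhook activity above (endpoint and payload are placeholders):

from azure.mgmt.datafactory.models import WebHookActivity

# Wait up to 10 minutes for the receiver to call back; the callback body
# can mark this activity failed by returning statusCode >= 400.
webhook = WebHookActivity(
    name="ApprovalGate",
    method="POST",
    url="https://example.contoso.com/api/approve",
    timeout="00:10:00",
    body='{"runId": "@{pipeline().RunId}"}',
    report_status_on_call_back=True,
)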
:type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ @@ -40349,11 +41796,11 @@ def __init__( self, *, type_properties: "WebLinkedServiceTypeProperties", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, **kwargs ): super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -40368,18 +41815,21 @@ class WebSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
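A sketch tying the web linked service above to one of its typeProperties variants (URL and credentials are placeholders; SecureString is one SecretBase implementation, a Key Vault reference works as well):

from azure.mgmt.datafactory.models import (
    SecureString,
    WebBasicAuthentication,
    WebLinkedService,
)

web_ls = WebLinkedService(
    type_properties=WebBasicAuthentication(
        url="https://example.contoso.com/data",
        username="reader",
        password=SecureString(value="placeholder-password"),
    ),
)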
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] @@ -40395,20 +41845,22 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'WebSource' # type: str self.additional_columns = additional_columns @@ -40420,32 +41872,32 @@ class WebTableDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param index: Required. The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. - :type index: object + :type index: any :param path: The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). 
- :type path: object + :type path: any """ _validation = { @@ -40472,15 +41924,15 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - index: object, - additional_properties: Optional[Dict[str, object]] = None, + index: Any, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - path: Optional[object] = None, + path: Optional[Any] = None, **kwargs ): super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -40496,7 +41948,7 @@ class XeroLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -40506,12 +41958,12 @@ class XeroLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param host: The endpoint of the Xero server. (i.e. api.xero.com). - :type host: object + :type host: any :param consumer_key: The consumer key associated with the Xero application. :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase :param private_key: The private key from the .pem file that was generated for your Xero private @@ -40520,18 +41972,18 @@ class XeroLinkedService(LinkedService): :type private_key: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
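A sketch for the web table dataset above (linked service name and path are placeholders):

from azure.mgmt.datafactory.models import LinkedServiceReference, WebTableDataset

# The first table (index 0) on a page addressed relative to the linked
# service URL.
web_table = WebTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="MyWebLinkedService"),
    index=0,
    path="statistics/monthly",
)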
- :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -40558,19 +42010,19 @@ class XeroLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_properties: Optional[object] = None, - host: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_properties: Optional[Any] = None, + host: Optional[Any] = None, consumer_key: Optional["SecretBase"] = None, private_key: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -40592,28 +42044,28 @@ class XeroObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). 
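A sketch for the Xero linked service above (both secrets are placeholders and would normally come from Key Vault rather than inline SecureString values):

from azure.mgmt.datafactory.models import SecureString, XeroLinkedService

xero = XeroLinkedService(
    host="api.xero.com",
    consumer_key=SecureString(value="placeholder-consumer-key"),
    private_key=SecureString(value="placeholder-pem-contents"),
    use_encrypted_endpoints=True,
)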
- :type table_name: object + :type table_name: any """ _validation = { @@ -40638,14 +42090,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -40660,27 +42112,30 @@ class XeroSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
- :type query: object + :type query: any """ _validation = { @@ -40693,6 +42148,7 @@ class XeroSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -40701,16 +42157,17 @@ class XeroSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'XeroSource' # type: str self.query = query @@ -40722,23 +42179,23 @@ class XmlDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder @@ -40749,9 +42206,9 @@ class XmlDataset(Dataset): of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). - :type encoding_name: object + :type encoding_name: any :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :type null_value: any :param compression: The data compression method used for the xml dataset. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { @@ -40781,16 +42238,16 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - encoding_name: Optional[object] = None, - null_value: Optional[object] = None, + encoding_name: Optional[Any] = None, + null_value: Optional[Any] = None, compression: Optional["DatasetCompression"] = None, **kwargs ): @@ -40809,25 +42266,25 @@ class XmlReadSettings(FormatReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The read setting type.Constant filled by server. :type type: str :param compression_properties: Compression settings. :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings :param validation_mode: Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). - :type validation_mode: object + :type validation_mode: any :param detect_data_type: Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :type detect_data_type: object + :type detect_data_type: any :param namespaces: Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). - :type namespaces: object + :type namespaces: any :param namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object).
- :type namespace_prefixes: object + :type namespace_prefixes: any """ _validation = { @@ -40847,12 +42304,12 @@ class XmlReadSettings(FormatReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, compression_properties: Optional["CompressionReadSettings"] = None, - validation_mode: Optional[object] = None, - detect_data_type: Optional[object] = None, - namespaces: Optional[object] = None, - namespace_prefixes: Optional[object] = None, + validation_mode: Optional[Any] = None, + detect_data_type: Optional[Any] = None, + namespaces: Optional[Any] = None, + namespace_prefixes: Optional[Any] = None, **kwargs ): super(XmlReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -40871,18 +42328,21 @@ class XmlSource(CopySource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param store_settings: Xml store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Xml format settings. 
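For reviewers, a minimal usage sketch of how the regenerated Xml models above compose, assuming the signatures exactly as generated in these hunks; the linked service name, container, and file name are illustrative placeholders, and AzureBlobStorageLocation / AzureBlobStorageReadSettings are pre-existing models from the same package:

    from azure.mgmt.datafactory.models import (
        AzureBlobStorageLocation,
        AzureBlobStorageReadSettings,
        LinkedServiceReference,
        XmlDataset,
        XmlReadSettings,
        XmlSource,
    )

    # Dataset definition pointing at a hypothetical XML file in Blob storage.
    dataset = XmlDataset(
        linked_service_name=LinkedServiceReference(reference_name="ExampleBlobLinkedService"),
        location=AzureBlobStorageLocation(container="data", file_name="input.xml"),
        encoding_name="UTF-8",
    )

    # Copy source exercising the disable_metrics_collection flag added in this PR.
    source = XmlSource(
        store_settings=AzureBlobStorageReadSettings(recursive=True),
        format_settings=XmlReadSettings(validation_mode="xsd", detect_data_type=True),
        disable_metrics_collection=True,
    )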
@@ -40902,6 +42362,7 @@ class XmlSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -40910,16 +42371,17 @@ class XmlSource(CopySource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["XmlReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'XmlSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -40933,12 +42395,12 @@ class ZipDeflateReadSettings(CompressionReadSettings): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. The Compression setting type.Constant filled by server. :type type: str :param preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). - :type preserve_zip_file_name_as_folder: object + :type preserve_zip_file_name_as_folder: any """ _validation = { @@ -40954,8 +42416,8 @@ class ZipDeflateReadSettings(CompressionReadSettings): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - preserve_zip_file_name_as_folder: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + preserve_zip_file_name_as_folder: Optional[Any] = None, **kwargs ): super(ZipDeflateReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -40970,7 +42432,7 @@ class ZohoLinkedService(LinkedService): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. @@ -40980,28 +42442,28 @@ class ZohoLinkedService(LinkedService): :param parameters: Parameters for linked service. 
:type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] + :type annotations: list[any] :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. - :type connection_properties: object + :type connection_properties: any :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). - :type endpoint: object + :type endpoint: any :param access_token: The access token for Zoho authentication. :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :type use_encrypted_endpoints: any :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. - :type use_host_verification: object + :type use_host_verification: any :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. - :type use_peer_verification: object + :type use_peer_verification: any :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: any """ _validation = { @@ -41027,18 +42489,18 @@ class ZohoLinkedService(LinkedService): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_properties: Optional[object] = None, - endpoint: Optional[object] = None, + annotations: Optional[List[Any]] = None, + connection_properties: Optional[Any] = None, + endpoint: Optional[Any] = None, access_token: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + use_encrypted_endpoints: Optional[Any] = None, + use_host_verification: Optional[Any] = None, + use_peer_verification: Optional[Any] = None, + encrypted_credential: Optional[Any] = None, **kwargs ): super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) @@ -41059,28 +42521,28 @@ class ZohoObjectDataset(Dataset): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Type of dataset.Constant filled by server. :type type: str :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object + :type structure: any :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object + :type schema: any :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] + :type annotations: list[any] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :type table_name: any """ _validation = { @@ -41105,14 +42567,14 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, Any]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + structure: Optional[Any] = None, + schema: Optional[Any] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + annotations: Optional[List[Any]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + table_name: Optional[Any] = None, **kwargs ): super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -41127,27 +42589,30 @@ class ZohoSource(TabularSource): :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] + :type additional_properties: dict[str, any] :param type: Required. Copy source type.Constant filled by server. :type type: str :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). - :type source_retry_count: object + :type source_retry_count: any :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object + :type source_retry_wait: any :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :type max_concurrent_connections: any + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: any :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object + :type query_timeout: any :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type query: object + :type query: any """ _validation = { @@ -41160,6 +42625,7 @@ class ZohoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -41168,15 +42634,16 @@ class ZohoSource(TabularSource): def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, + additional_properties: Optional[Dict[str, Any]] = None, + source_retry_count: Optional[Any] = None, + source_retry_wait: Optional[Any] = None, + max_concurrent_connections: Optional[Any] = None, + disable_metrics_collection: Optional[Any] = None, + query_timeout: Optional[Any] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, - query: Optional[object] = None, + query: Optional[Any] = None, **kwargs ): - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ZohoSource' # type: str self.query = query diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index 95d268579097..c1da8c996a37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -23,6 +23,9 @@ from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations +from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations +from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations __all__ = [ 'Operations', @@ -42,4 +45,7 @@ 'DataFlowDebugSessionOperations', 'ManagedVirtualNetworksOperations', 'ManagedPrivateEndpointsOperations', + 'PrivateEndPointConnectionsOperations', + 'PrivateEndpointConnectionOperations', + 'PrivateLinkResourcesOperations', ] diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py index d0fab6301f48..84375fcbae30 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py @@ -125,8 +125,8 @@ def begin_create( :type request: ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) @@ -463,8 +463,8 @@ def begin_execute_command( :type request: ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py index 946dc71cc0b8..0a38967fb5e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -116,8 +116,8 @@ def begin_refresh( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py index 355d49199c50..ed84d9bb134d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py @@ -465,6 +465,69 @@ def get_status( return deserialized get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + def outbound_network_dependencies_endpoints( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse" + """Gets the list of outbound network dependencies for a given Azure-SSIS integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.outbound_network_dependencies_endpoints.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = 
self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/outboundNetworkDependenciesEndpoints'} # type: ignore + def get_connection_info( self, resource_group_name, # type: str @@ -734,8 +797,8 @@ def begin_start( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) @@ -854,8 +917,8 @@ def begin_stop( :type integration_runtime_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py index 7a94f770ac76..7f972501d499 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py @@ -337,7 +337,7 @@ def create_run( is_recovery=None, # type: Optional[bool] start_activity_name=None, # type: Optional[str] start_from_failure=None, # type: Optional[bool] - parameters=None, # type: Optional[Dict[str, object]] + parameters=None, # type: Optional[Dict[str, Any]] **kwargs # type: Any ): # type: (...) 
-> "_models.CreateRunResponse" @@ -363,7 +363,7 @@ def create_run( :type start_from_failure: bool :param parameters: Parameters of the pipeline run. These parameters will be used only if the runId is not specified. - :type parameters: dict[str, object] + :type parameters: dict[str, any] :keyword callable cls: A custom type or function that will be passed the direct response :return: CreateRunResponse, or the result of cls(response) :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py new file mode 100644 index 000000000000..4f8b24284394 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndPointConnectionsOperations(object): + """PrivateEndPointConnectionsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.PrivateEndpointConnectionListResponse"] + """Lists Private endpoint connections. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections'} # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py new file mode 100644 index 000000000000..deb6a5acb95e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py @@ -0,0 +1,252 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionOperations(object): + """PrivateEndpointConnectionOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + private_endpoint_connection_name, # type: str + private_endpoint_wrapper, # type: "_models.PrivateLinkConnectionApprovalRequestResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateEndpointConnectionResource" + """Approves or rejects a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. + :type private_endpoint_connection_name: str + :param private_endpoint_wrapper: The private endpoint connection approval request resource. + :type private_endpoint_wrapper: ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequestResource + :param if_match: ETag of the private endpoint connection entity. Should only be specified for + update, for which it should match existing entity or can be * for unconditional update.
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + private_endpoint_connection_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateEndpointConnectionResource" + """Gets a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. 
+ :type private_endpoint_connection_name: str + :param if_none_match: ETag of the private endpoint connection entity. Should only be specified + for get. If the ETag matches the existing entity tag, or if * was provided, then no content + will be returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnectionResource, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnectionResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint connection name. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py new file mode 100644 index 000000000000..125681ace87f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations(object): + """PrivateLinkResourcesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateLinkResourcesWrapper" + """Gets the private link resources. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourcesWrapper, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.PrivateLinkResourcesWrapper + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourcesWrapper"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourcesWrapper', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py
index 6b8c088bb279..cbc17f0eb3e8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py
@@ -467,8 +467,8 @@ def begin_subscribe_to_events(
         :type trigger_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
         :keyword str continuation_token: A continuation token to restart a poller from a saved state.
-        :keyword polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
+        :keyword polling: By default, your polling method will be ARMPolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
         :paramtype polling: bool or ~azure.core.polling.PollingMethod
         :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
         :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
@@ -655,8 +655,8 @@ def begin_unsubscribe_from_events(
         :type trigger_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
         :keyword str continuation_token: A continuation token to restart a poller from a saved state.
-        :keyword polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
+        :keyword polling: By default, your polling method will be ARMPolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
         :paramtype polling: bool or ~azure.core.polling.PollingMethod
         :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
         :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
@@ -775,8 +775,8 @@ def begin_start(
         :type trigger_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
         :keyword str continuation_token: A continuation token to restart a poller from a saved state.
-        :keyword polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
+        :keyword polling: By default, your polling method will be ARMPolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
         :paramtype polling: bool or ~azure.core.polling.PollingMethod
         :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
         :return: An instance of LROPoller that returns either None or the result of cls(response)
@@ -892,8 +892,8 @@ def begin_stop(
         :type trigger_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
         :keyword str continuation_token: A continuation token to restart a poller from a saved state.
-        :keyword polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
+        :keyword polling: By default, your polling method will be ARMPolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
         :paramtype polling: bool or ~azure.core.polling.PollingMethod
         :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
         :return: An instance of LROPoller that returns either None or the result of cls(response)
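The reworded polling docstrings describe the same three accepted values as before: the ARMPolling default, False for no polling, or a pre-initialized polling object. A short sketch of each, assuming the trigger operation group is attached as client.triggers and reusing the client from the first sketch; the trigger name is a placeholder.

from azure.mgmt.core.polling.arm_polling import ARMPolling

# Default: ARMPolling drives the long-running operation to completion.
client.triggers.begin_start("example-rg", "exampleFactory", "exampleTrigger").result()

# polling=False: send the initial request and return without polling.
poller = client.triggers.begin_stop(
    "example-rg", "exampleFactory", "exampleTrigger", polling=False
)

# A pre-initialized polling object, e.g. to poll on a 5-second interval.
client.triggers.begin_stop(
    "example-rg", "exampleFactory", "exampleTrigger", polling=ARMPolling(timeout=5)
).result()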