From 7edf52296c08688e22db963af97b191fc850e091 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Fri, 19 Feb 2021 07:50:20 +0000 Subject: [PATCH 1/2] CodeGen from PR 12883 in Azure/azure-rest-api-specs Add pipeline policy and Elapsed Time Metric (#12883) * Add pipeline policy and ElapsedTime Policy * adding missing objects * fix schema validation * fix prettier issues Co-authored-by: Kalyan C Kesireddy --- .../azure/mgmt/datafactory/models/__init__.py | 12 + .../azure/mgmt/datafactory/models/_models.py | 221 +++++++++++++++- .../mgmt/datafactory/models/_models_py3.py | 235 ++++++++++++++++-- 3 files changed, 437 insertions(+), 31 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index c0aab6b04cea..987622ae0455 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -159,6 +159,7 @@ from ._models_py3 import CustomActivityReferenceObject from ._models_py3 import CustomDataset from ._models_py3 import CustomDataSourceLinkedService + from ._models_py3 import CustomEventsTrigger from ._models_py3 import CustomSetupBase from ._models_py3 import DatabricksNotebookActivity from ._models_py3 import DatabricksSparkJarActivity @@ -371,6 +372,7 @@ from ._models_py3 import ManagedPrivateEndpoint from ._models_py3 import ManagedPrivateEndpointResource from ._models_py3 import ManagedVirtualNetwork + from ._models_py3 import ManagedVirtualNetworkReference from ._models_py3 import ManagedVirtualNetworkResource from ._models_py3 import MappingDataFlow from ._models_py3 import MariaDBLinkedService @@ -444,7 +446,9 @@ from ._models_py3 import PhoenixLinkedService from ._models_py3 import PhoenixObjectDataset from ._models_py3 import PhoenixSource + from ._models_py3 import PipelineElapsedTimeMetricPolicy from ._models_py3 
import PipelineFolder + from ._models_py3 import PipelinePolicy from ._models_py3 import PipelineReference from ._models_py3 import PipelineResource from ._models_py3 import PipelineRun @@ -791,6 +795,7 @@ from ._models import CustomActivityReferenceObject from ._models import CustomDataset from ._models import CustomDataSourceLinkedService + from ._models import CustomEventsTrigger from ._models import CustomSetupBase from ._models import DatabricksNotebookActivity from ._models import DatabricksSparkJarActivity @@ -1003,6 +1008,7 @@ from ._models import ManagedPrivateEndpoint from ._models import ManagedPrivateEndpointResource from ._models import ManagedVirtualNetwork + from ._models import ManagedVirtualNetworkReference from ._models import ManagedVirtualNetworkResource from ._models import MappingDataFlow from ._models import MariaDBLinkedService @@ -1076,7 +1082,9 @@ from ._models import PhoenixLinkedService from ._models import PhoenixObjectDataset from ._models import PhoenixSource + from ._models import PipelineElapsedTimeMetricPolicy from ._models import PipelineFolder + from ._models import PipelinePolicy from ._models import PipelineReference from ._models import PipelineResource from ._models import PipelineRun @@ -1518,6 +1526,7 @@ 'CustomActivityReferenceObject', 'CustomDataset', 'CustomDataSourceLinkedService', + 'CustomEventsTrigger', 'CustomSetupBase', 'DatabricksNotebookActivity', 'DatabricksSparkJarActivity', @@ -1730,6 +1739,7 @@ 'ManagedPrivateEndpoint', 'ManagedPrivateEndpointResource', 'ManagedVirtualNetwork', + 'ManagedVirtualNetworkReference', 'ManagedVirtualNetworkResource', 'MappingDataFlow', 'MariaDBLinkedService', @@ -1803,7 +1813,9 @@ 'PhoenixLinkedService', 'PhoenixObjectDataset', 'PhoenixSource', + 'PipelineElapsedTimeMetricPolicy', 'PipelineFolder', + 'PipelinePolicy', 'PipelineReference', 'PipelineResource', 'PipelineRun', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index ebd28d0a36c0..42896972d1f1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -3333,10 +3333,17 @@ class AzureDatabricksLinkedService(LinkedService): Databricks deployment. Type: string (or Expression with resultType string). :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer - to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication: Required to specify MSI, if using Workspace + resource id for databricks REST API. Type: string (or Expression with + resultType string). + :type authentication: object + :param workspace_resource_id: Workspace resource id for databricks REST + API. Type: string (or Expression with resultType string). + :type workspace_resource_id: object :param existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). @@ -3394,12 +3401,15 @@ class AzureDatabricksLinkedService(LinkedService): authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param policy_id: The policy id for limiting the ability to configure + clusters based on a user defined set of rules. Type: string (or Expression + with resultType string). 
+ :type policy_id: object """ _validation = { 'type': {'required': True}, 'domain': {'required': True}, - 'access_token': {'required': True}, } _attribute_map = { @@ -3411,6 +3421,8 @@ class AzureDatabricksLinkedService(LinkedService): 'type': {'key': 'type', 'type': 'str'}, 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, @@ -3424,12 +3436,15 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureDatabricksLinkedService, self).__init__(**kwargs) self.domain = kwargs.get('domain', None) self.access_token = kwargs.get('access_token', None) + self.authentication = kwargs.get('authentication', None) + self.workspace_resource_id = kwargs.get('workspace_resource_id', None) self.existing_cluster_id = kwargs.get('existing_cluster_id', None) self.instance_pool_id = kwargs.get('instance_pool_id', None) self.new_cluster_version = kwargs.get('new_cluster_version', None) @@ -3443,6 +3458,7 @@ def __init__(self, **kwargs): self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) self.new_cluster_enable_elastic_disk = 
kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.policy_id = kwargs.get('policy_id', None) self.type = 'AzureDatabricks' @@ -5119,9 +5135,15 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. - Type: string (or Expression with resultType string). + :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string + (or Expression with resultType string). :type ml_pipeline_id: object + :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline + endpoint. Type: string (or Expression with resultType string). + :type ml_pipeline_endpoint_id: object + :param version: Version of the published Azure ML pipeline endpoint. Type: + string (or Expression with resultType string). + :type version: object :param experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with @@ -5134,6 +5156,11 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): execution request. Type: object with key value pairs (or Expression with resultType object). :type ml_pipeline_parameters: object + :param data_path_assignments: Dictionary used for changing data path + assignments without retraining. Values will be passed in the + dataPathAssignments property of the published pipeline execution request. + Type: object with key value pairs (or Expression with resultType object). + :type data_path_assignments: object :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. 
Type: string (or Expression with resultType @@ -5149,7 +5176,6 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { @@ -5162,8 +5188,11 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'ml_pipeline_endpoint_id': {'key': 'typeProperties.mlPipelineEndpointId', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'data_path_assignments': {'key': 'typeProperties.dataPathAssignments', 'type': 'object'}, 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } @@ -5171,8 +5200,11 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): def __init__(self, **kwargs): super(AzureMLExecutePipelineActivity, self).__init__(**kwargs) self.ml_pipeline_id = kwargs.get('ml_pipeline_id', None) + self.ml_pipeline_endpoint_id = kwargs.get('ml_pipeline_endpoint_id', None) + self.version = kwargs.get('version', None) self.experiment_name = kwargs.get('experiment_name', None) self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) + self.data_path_assignments = kwargs.get('data_path_assignments', None) self.ml_parent_run_id = kwargs.get('ml_parent_run_id', None) self.continue_on_step_failure = kwargs.get('continue_on_step_failure', None) self.type = 'AzureMLExecutePipeline' @@ -7388,7 +7420,8 @@ class MultiplePipelineTrigger(Trigger): pipeline. 
You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger + sub-classes are: CustomEventsTrigger, BlobEventsTrigger, BlobTrigger, + ScheduleTrigger Variables are only populated by the server, and will be ignored when sending a request. @@ -7430,7 +7463,7 @@ class MultiplePipelineTrigger(Trigger): } _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + 'type': {'CustomEventsTrigger': 'CustomEventsTrigger', 'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__(self, **kwargs): @@ -9697,6 +9730,10 @@ class CustomActivity(ExecutionActivity): :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). :type retention_time_in_days: object + :param auto_user_specification: Elevation level and scope for the user, + default is nonadmin task. Type: string (or Expression with resultType + double). 
+ :type auto_user_specification: object """ _validation = { @@ -9720,6 +9757,7 @@ class CustomActivity(ExecutionActivity): 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, } def __init__(self, **kwargs): @@ -9730,6 +9768,7 @@ def __init__(self, **kwargs): self.reference_objects = kwargs.get('reference_objects', None) self.extended_properties = kwargs.get('extended_properties', None) self.retention_time_in_days = kwargs.get('retention_time_in_days', None) + self.auto_user_specification = kwargs.get('auto_user_specification', None) self.type = 'Custom' @@ -9859,6 +9898,76 @@ def __init__(self, **kwargs): self.type = 'CustomDataSource' +class CustomEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a custom event is received. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. 
+ :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param subject_begins_with: The event subject must begin with the pattern + provided for trigger to fire. At least one of these must be provided: + subjectBeginsWith, subjectEndsWith. + :type subject_begins_with: str + :param subject_ends_with: The event subject must end with the pattern + provided for trigger to fire. At least one of these must be provided: + subjectBeginsWith, subjectEndsWith. + :type subject_ends_with: str + :param events: Required. The list of event types that cause this trigger + to fire. + :type events: list[object] + :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. + :type scope: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, + 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[object]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CustomEventsTrigger, self).__init__(**kwargs) + self.subject_begins_with = kwargs.get('subject_begins_with', None) + self.subject_ends_with = kwargs.get('subject_ends_with', None) + self.events = kwargs.get('events', None) + self.scope = kwargs.get('scope', None) + self.type = 'CustomEventsTrigger' + + class DatabricksNotebookActivity(ExecutionActivity): """DatabricksNotebook activity. 
@@ -18982,6 +19091,9 @@ class IntegrationRuntimeSsisCatalogInfo(Model): values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' :type catalog_pricing_tier: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS + Integration Runtimes to support SSISDB failover. + :type dual_standby_pair_name: str """ _validation = { @@ -18994,6 +19106,7 @@ class IntegrationRuntimeSsisCatalogInfo(Model): 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + 'dual_standby_pair_name': {'key': 'dualStandbyPairName', 'type': 'str'}, } def __init__(self, **kwargs): @@ -19003,6 +19116,7 @@ def __init__(self, **kwargs): self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) self.catalog_admin_password = kwargs.get('catalog_admin_password', None) self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) + self.dual_standby_pair_name = kwargs.get('dual_standby_pair_name', None) class IntegrationRuntimeSsisProperties(Model): @@ -20370,6 +20484,9 @@ class ManagedIntegrationRuntime(IntegrationRuntime): :param ssis_properties: SSIS properties for managed integration runtime. :type ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + :param managed_virtual_network: Managed Virtual Network reference. 
+ :type managed_virtual_network: + ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference """ _validation = { @@ -20384,6 +20501,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'state': {'key': 'state', 'type': 'str'}, 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, + 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, } def __init__(self, **kwargs): @@ -20391,6 +20509,7 @@ def __init__(self, **kwargs): self.state = None self.compute_properties = kwargs.get('compute_properties', None) self.ssis_properties = kwargs.get('ssis_properties', None) + self.managed_virtual_network = kwargs.get('managed_virtual_network', None) self.type = 'Managed' @@ -20723,6 +20842,38 @@ def __init__(self, **kwargs): self.alias = None +class ManagedVirtualNetworkReference(Model): + """Managed Virtual Network reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Managed Virtual Network reference type. Default + value: "ManagedVirtualNetworkReference" . + :vartype type: str + :param reference_name: Required. Reference ManagedVirtualNetwork name. 
+ :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "ManagedVirtualNetworkReference" + + def __init__(self, **kwargs): + super(ManagedVirtualNetworkReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + + class ManagedVirtualNetworkResource(SubResource): """Managed Virtual Network resource type. @@ -24812,6 +24963,23 @@ def __init__(self, **kwargs): self.type = 'PhoenixSource' +class PipelineElapsedTimeMetricPolicy(Model): + """Pipeline ElapsedTime Metric Policy. + + :param duration: TimeSpan value, after which an Azure Monitoring Metric is + fired. + :type duration: object + """ + + _attribute_map = { + 'duration': {'key': 'duration', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) + self.duration = kwargs.get('duration', None) + + class PipelineFolder(Model): """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. @@ -24829,6 +24997,23 @@ def __init__(self, **kwargs): self.name = kwargs.get('name', None) +class PipelinePolicy(Model): + """Pipeline Policy. + + :param elapsed_time_metric: + :type elapsed_time_metric: + ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy + """ + + _attribute_map = { + 'elapsed_time_metric': {'key': 'elapsedTimeMetric', 'type': 'PipelineElapsedTimeMetricPolicy'}, + } + + def __init__(self, **kwargs): + super(PipelinePolicy, self).__init__(**kwargs) + self.elapsed_time_metric = kwargs.get('elapsed_time_metric', None) + + class PipelineReference(Model): """Pipeline reference type. @@ -24902,6 +25087,8 @@ class PipelineResource(SubResource): :param folder: The folder that this Pipeline is in. 
If not specified, Pipeline will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.PipelineFolder + :param policy: + :type policy: ~azure.mgmt.datafactory.models.PipelinePolicy """ _validation = { @@ -24926,6 +25113,7 @@ class PipelineResource(SubResource): 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + 'policy': {'key': 'properties.policy', 'type': 'PipelinePolicy'}, } def __init__(self, **kwargs): @@ -24939,6 +25127,7 @@ def __init__(self, **kwargs): self.annotations = kwargs.get('annotations', None) self.run_dimensions = kwargs.get('run_dimensions', None) self.folder = kwargs.get('folder', None) + self.policy = kwargs.get('policy', None) class PipelineRun(Model): @@ -34528,18 +34717,24 @@ class WebActivityAuthentication(Model): All required parameters must be populated in order to send to Azure. :param type: Required. Web activity authentication - (Basic/ClientCertificate/MSI) + (Basic/ClientCertificate/MSI/ServicePrincipal) :type type: str - :param pfx: Base64-encoded contents of a PFX file. + :param pfx: Base64-encoded contents of a PFX file or Certificate when used + for ServicePrincipal :type pfx: ~azure.mgmt.datafactory.models.SecretBase :param username: Web activity authentication user name for basic - authentication. + authentication or ClientID when used for ServicePrincipal :type username: str - :param password: Password for the PFX file or basic authentication. + :param password: Password for the PFX file or basic authentication / + Secret when used for ServicePrincipal :type password: ~azure.mgmt.datafactory.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI Authentication. :type resource: str + :param user_tenant: TenantId for which Azure Auth token will be requested + when using ServicePrincipal Authentication. 
Type: string (or Expression + with resultType string). + :type user_tenant: object """ _validation = { @@ -34552,6 +34747,7 @@ class WebActivityAuthentication(Model): 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'SecretBase'}, 'resource': {'key': 'resource', 'type': 'str'}, + 'user_tenant': {'key': 'userTenant', 'type': 'object'}, } def __init__(self, **kwargs): @@ -34561,6 +34757,7 @@ def __init__(self, **kwargs): self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.resource = kwargs.get('resource', None) + self.user_tenant = kwargs.get('user_tenant', None) class WebLinkedServiceTypeProperties(Model): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 39da9fd2c468..f4386d2b9084 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -3333,10 +3333,17 @@ class AzureDatabricksLinkedService(LinkedService): Databricks deployment. Type: string (or Expression with resultType string). :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer - to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication: Required to specify MSI, if using Workspace + resource id for databricks REST API. Type: string (or Expression with + resultType string). + :type authentication: object + :param workspace_resource_id: Workspace resource id for databricks REST + API. 
Type: string (or Expression with resultType string). + :type workspace_resource_id: object :param existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). @@ -3394,12 +3401,15 @@ class AzureDatabricksLinkedService(LinkedService): authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param policy_id: The policy id for limiting the ability to configure + clusters based on a user defined set of rules. Type: string (or Expression + with resultType string). + :type policy_id: object """ _validation = { 'type': {'required': True}, 'domain': {'required': True}, - 'access_token': {'required': True}, } _attribute_map = { @@ -3411,6 +3421,8 @@ class AzureDatabricksLinkedService(LinkedService): 'type': {'key': 'type', 'type': 'str'}, 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, @@ -3424,12 +3436,15 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, } - def __init__(self, *, 
domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, instance_pool_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_log_destination=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, domain, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, authentication=None, workspace_resource_id=None, existing_cluster_id=None, instance_pool_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_log_destination=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, policy_id=None, **kwargs) -> None: super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.domain = domain self.access_token = access_token + self.authentication = authentication + self.workspace_resource_id = workspace_resource_id self.existing_cluster_id = existing_cluster_id self.instance_pool_id = instance_pool_id self.new_cluster_version = new_cluster_version @@ -3443,6 +3458,7 @@ def __init__(self, *, domain, access_token, additional_properties=None, connect_ self.new_cluster_init_scripts = new_cluster_init_scripts self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential + self.policy_id = policy_id self.type = 'AzureDatabricks' @@ 
-5119,9 +5135,15 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. - Type: string (or Expression with resultType string). + :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string + (or Expression with resultType string). :type ml_pipeline_id: object + :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline + endpoint. Type: string (or Expression with resultType string). + :type ml_pipeline_endpoint_id: object + :param version: Version of the published Azure ML pipeline endpoint. Type: + string (or Expression with resultType string). + :type version: object :param experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with @@ -5134,6 +5156,11 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): execution request. Type: object with key value pairs (or Expression with resultType object). :type ml_pipeline_parameters: object + :param data_path_assignments: Dictionary used for changing data path + assignments without retraining. Values will be passed in the + dataPathAssignments property of the published pipeline execution request. + Type: object with key value pairs (or Expression with resultType object). + :type data_path_assignments: object :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. 
Type: string (or Expression with resultType @@ -5149,7 +5176,6 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { @@ -5162,17 +5188,23 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'ml_pipeline_endpoint_id': {'key': 'typeProperties.mlPipelineEndpointId', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'data_path_assignments': {'key': 'typeProperties.dataPathAssignments', 'type': 'object'}, 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } - def __init__(self, *, name: str, ml_pipeline_id, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, experiment_name=None, ml_pipeline_parameters=None, ml_parent_run_id=None, continue_on_step_failure=None, **kwargs) -> None: + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, ml_pipeline_id=None, ml_pipeline_endpoint_id=None, version=None, experiment_name=None, ml_pipeline_parameters=None, data_path_assignments=None, ml_parent_run_id=None, continue_on_step_failure=None, **kwargs) -> None: super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, 
 user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.ml_pipeline_id = ml_pipeline_id + self.ml_pipeline_endpoint_id = ml_pipeline_endpoint_id + self.version = version self.experiment_name = experiment_name self.ml_pipeline_parameters = ml_pipeline_parameters + self.data_path_assignments = data_path_assignments self.ml_parent_run_id = ml_parent_run_id self.continue_on_step_failure = continue_on_step_failure self.type = 'AzureMLExecutePipeline' @@ -7388,7 +7420,8 @@ class MultiplePipelineTrigger(Trigger): pipeline. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger + sub-classes are: CustomEventsTrigger, BlobEventsTrigger, BlobTrigger, + ScheduleTrigger Variables are only populated by the server, and will be ignored when sending a request. @@ -7430,7 +7463,7 @@ class MultiplePipelineTrigger(Trigger): } _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + 'type': {'CustomEventsTrigger': 'CustomEventsTrigger', 'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: @@ -9697,6 +9730,10 @@ class CustomActivity(ExecutionActivity): :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). :type retention_time_in_days: object + :param auto_user_specification: Elevation level and scope for the user, + default is nonadmin task. Type: string (or Expression with resultType + string). 
+ :type auto_user_specification: object """ _validation = { @@ -9720,9 +9757,10 @@ class CustomActivity(ExecutionActivity): 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, } - def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None: + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, auto_user_specification=None, **kwargs) -> None: super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.command = command self.resource_linked_service = resource_linked_service @@ -9730,6 +9768,7 @@ def __init__(self, *, name: str, command, additional_properties=None, descriptio self.reference_objects = reference_objects self.extended_properties = extended_properties self.retention_time_in_days = retention_time_in_days + self.auto_user_specification = auto_user_specification self.type = 'Custom' @@ -9859,6 +9898,76 @@ def __init__(self, *, type_properties, additional_properties=None, connect_via=N self.type = 'CustomDataSource' +class 
 CustomEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a custom event is received. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param subject_begins_with: The event subject must begin with the pattern + provided for trigger to fire. At least one of these must be provided: + subjectBeginsWith, subjectEndsWith. + :type subject_begins_with: str + :param subject_ends_with: The event subject must end with the pattern + provided for trigger to fire. At least one of these must be provided: + subjectBeginsWith, subjectEndsWith. + :type subject_ends_with: str + :param events: Required. The list of event types that cause this trigger + to fire. + :type events: list[object] + :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. 
+ :type scope: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, + 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[object]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, subject_begins_with: str=None, subject_ends_with: str=None, **kwargs) -> None: + super(CustomEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.subject_begins_with = subject_begins_with + self.subject_ends_with = subject_ends_with + self.events = events + self.scope = scope + self.type = 'CustomEventsTrigger' + + class DatabricksNotebookActivity(ExecutionActivity): """DatabricksNotebook activity. @@ -18982,6 +19091,9 @@ class IntegrationRuntimeSsisCatalogInfo(Model): values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' :type catalog_pricing_tier: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS + Integration Runtimes to support SSISDB failover. 
+ :type dual_standby_pair_name: str """ _validation = { @@ -18994,15 +19106,17 @@ class IntegrationRuntimeSsisCatalogInfo(Model): 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + 'dual_standby_pair_name': {'key': 'dualStandbyPairName', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, dual_standby_pair_name: str=None, **kwargs) -> None: super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_server_endpoint = catalog_server_endpoint self.catalog_admin_user_name = catalog_admin_user_name self.catalog_admin_password = catalog_admin_password self.catalog_pricing_tier = catalog_pricing_tier + self.dual_standby_pair_name = dual_standby_pair_name class IntegrationRuntimeSsisProperties(Model): @@ -20370,6 +20484,9 @@ class ManagedIntegrationRuntime(IntegrationRuntime): :param ssis_properties: SSIS properties for managed integration runtime. :type ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + :param managed_virtual_network: Managed Virtual Network reference. 
+ :type managed_virtual_network: + ~azure.mgmt.datafactory.models.ManagedVirtualNetworkReference """ _validation = { @@ -20384,13 +20501,15 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'state': {'key': 'state', 'type': 'str'}, 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, + 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, } - def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, managed_virtual_network=None, **kwargs) -> None: super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) self.state = None self.compute_properties = compute_properties self.ssis_properties = ssis_properties + self.managed_virtual_network = managed_virtual_network self.type = 'Managed' @@ -20723,6 +20842,38 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: self.alias = None +class ManagedVirtualNetworkReference(Model): + """Managed Virtual Network reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Managed Virtual Network reference type. Default + value: "ManagedVirtualNetworkReference" . + :vartype type: str + :param reference_name: Required. Reference ManagedVirtualNetwork name. 
+ :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "ManagedVirtualNetworkReference" + + def __init__(self, *, reference_name: str, **kwargs) -> None: + super(ManagedVirtualNetworkReference, self).__init__(**kwargs) + self.reference_name = reference_name + + class ManagedVirtualNetworkResource(SubResource): """Managed Virtual Network resource type. @@ -24812,6 +24963,23 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'PhoenixSource' +class PipelineElapsedTimeMetricPolicy(Model): + """Pipeline ElapsedTime Metric Policy. + + :param duration: TimeSpan value, after which an Azure Monitoring Metric is + fired. + :type duration: object + """ + + _attribute_map = { + 'duration': {'key': 'duration', 'type': 'object'}, + } + + def __init__(self, *, duration=None, **kwargs) -> None: + super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) + self.duration = duration + + class PipelineFolder(Model): """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. @@ -24829,6 +24997,23 @@ def __init__(self, *, name: str=None, **kwargs) -> None: self.name = name +class PipelinePolicy(Model): + """Pipeline Policy. + + :param elapsed_time_metric: + :type elapsed_time_metric: + ~azure.mgmt.datafactory.models.PipelineElapsedTimeMetricPolicy + """ + + _attribute_map = { + 'elapsed_time_metric': {'key': 'elapsedTimeMetric', 'type': 'PipelineElapsedTimeMetricPolicy'}, + } + + def __init__(self, *, elapsed_time_metric=None, **kwargs) -> None: + super(PipelinePolicy, self).__init__(**kwargs) + self.elapsed_time_metric = elapsed_time_metric + + class PipelineReference(Model): """Pipeline reference type. 
@@ -24902,6 +25087,8 @@ class PipelineResource(SubResource): :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.PipelineFolder + :param policy: + :type policy: ~azure.mgmt.datafactory.models.PipelinePolicy """ _validation = { @@ -24926,9 +25113,10 @@ class PipelineResource(SubResource): 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + 'policy': {'key': 'properties.policy', 'type': 'PipelinePolicy'}, } - def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, run_dimensions=None, folder=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, run_dimensions=None, folder=None, policy=None, **kwargs) -> None: super(PipelineResource, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description @@ -24939,6 +25127,7 @@ def __init__(self, *, additional_properties=None, description: str=None, activit self.annotations = annotations self.run_dimensions = run_dimensions self.folder = folder + self.policy = policy class PipelineRun(Model): @@ -34528,18 +34717,24 @@ class WebActivityAuthentication(Model): All required parameters must be populated in order to send to Azure. :param type: Required. Web activity authentication - (Basic/ClientCertificate/MSI) + (Basic/ClientCertificate/MSI/ServicePrincipal) :type type: str - :param pfx: Base64-encoded contents of a PFX file. 
+ :param pfx: Base64-encoded contents of a PFX file or Certificate when used + for ServicePrincipal :type pfx: ~azure.mgmt.datafactory.models.SecretBase :param username: Web activity authentication user name for basic - authentication. + authentication or ClientID when used for ServicePrincipal :type username: str - :param password: Password for the PFX file or basic authentication. + :param password: Password for the PFX file or basic authentication / + Secret when used for ServicePrincipal :type password: ~azure.mgmt.datafactory.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI Authentication. :type resource: str + :param user_tenant: TenantId for which Azure Auth token will be requested + when using ServicePrincipal Authentication. Type: string (or Expression + with resultType string). + :type user_tenant: object """ _validation = { @@ -34552,15 +34747,17 @@ class WebActivityAuthentication(Model): 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'SecretBase'}, 'resource': {'key': 'resource', 'type': 'str'}, + 'user_tenant': {'key': 'userTenant', 'type': 'object'}, } - def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, **kwargs) -> None: + def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, user_tenant=None, **kwargs) -> None: super(WebActivityAuthentication, self).__init__(**kwargs) self.type = type self.pfx = pfx self.username = username self.password = password self.resource = resource + self.user_tenant = user_tenant class WebLinkedServiceTypeProperties(Model): From b544d8506a74938e7ae09d3715dc2d29ce66419f Mon Sep 17 00:00:00 2001 From: "Yan Zhang (WICRESOFT NORTH AMERICA LTD)" Date: Wed, 24 Feb 2021 17:37:25 +0800 Subject: [PATCH 2/2] test,version,CHANGELOG --- .../azure-mgmt-datafactory/CHANGELOG.md | 16 ++++++++++++++++ .../azure/mgmt/datafactory/version.py | 2 +- 2 files 
changed, 17 insertions(+), 1 deletion(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md b/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md index 76d0e0643e16..83a1e22031dd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md +++ b/sdk/datafactory/azure-mgmt-datafactory/CHANGELOG.md @@ -1,5 +1,21 @@ # Release History +## 0.16.0 (2021-02-24) + +**Features** + + - Model CustomActivity has a new parameter auto_user_specification + - Model PipelineResource has a new parameter policy + - Model WebActivityAuthentication has a new parameter user_tenant + - Model AzureMLExecutePipelineActivity has a new parameter version + - Model AzureMLExecutePipelineActivity has a new parameter ml_pipeline_endpoint_id + - Model AzureMLExecutePipelineActivity has a new parameter data_path_assignments + - Model AzureDatabricksLinkedService has a new parameter authentication + - Model AzureDatabricksLinkedService has a new parameter policy_id + - Model AzureDatabricksLinkedService has a new parameter workspace_resource_id + - Model ManagedIntegrationRuntime has a new parameter managed_virtual_network + - Model IntegrationRuntimeSsisCatalogInfo has a new parameter dual_standby_pair_name + ## 0.15.0 (2020-12-09) **Features** diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py index 2f147316d9cb..f609d3c9511a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py @@ -9,5 +9,5 @@ # regenerated. # -------------------------------------------------------------------------- -VERSION = "0.15.0" +VERSION = "0.16.0"