Skip to content

Commit

Permalink
feat: added new Dataplex APIs and new features for existing APIs (e.g. DataScans) (#133)
Browse files Browse the repository at this point in the history

* feat: added new Dataplex APIs and new features for existing APIs (e.g. DataScans)
docs: updated comments for multiple Dataplex APIs

PiperOrigin-RevId: 528906555

Source-Link: googleapis/googleapis@b7429bc

Source-Link: googleapis/googleapis-gen@5ddd68c
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNWRkZDY4YzI3YzEzNDJhZTg4MjYzMjU5YjcwOTU2OWE3MGRmYjNjYyJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
  • Loading branch information
gcf-owl-bot[bot] and gcf-owl-bot[bot] committed May 4, 2023
1 parent cd49971 commit 3f8f397
Show file tree
Hide file tree
Showing 25 changed files with 1,136 additions and 176 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@
ListPartitionsResponse,
Partition,
Schema,
StorageAccess,
StorageFormat,
StorageSystem,
UpdateEntityRequest,
Expand Down Expand Up @@ -148,6 +149,8 @@
ListZonesRequest,
ListZonesResponse,
OperationMetadata,
RunTaskRequest,
RunTaskResponse,
UpdateAssetRequest,
UpdateEnvironmentRequest,
UpdateLakeRequest,
Expand Down Expand Up @@ -212,6 +215,7 @@
"ListPartitionsResponse",
"Partition",
"Schema",
"StorageAccess",
"StorageFormat",
"UpdateEntityRequest",
"StorageSystem",
Expand Down Expand Up @@ -260,6 +264,8 @@
"ListZonesRequest",
"ListZonesResponse",
"OperationMetadata",
"RunTaskRequest",
"RunTaskResponse",
"UpdateAssetRequest",
"UpdateEnvironmentRequest",
"UpdateLakeRequest",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@
ListPartitionsResponse,
Partition,
Schema,
StorageAccess,
StorageFormat,
StorageSystem,
UpdateEntityRequest,
Expand Down Expand Up @@ -116,6 +117,8 @@
ListZonesRequest,
ListZonesResponse,
OperationMetadata,
RunTaskRequest,
RunTaskResponse,
UpdateAssetRequest,
UpdateEnvironmentRequest,
UpdateLakeRequest,
Expand Down Expand Up @@ -217,11 +220,14 @@
"Partition",
"RunDataScanRequest",
"RunDataScanResponse",
"RunTaskRequest",
"RunTaskResponse",
"ScannedData",
"Schema",
"Session",
"SessionEvent",
"State",
"StorageAccess",
"StorageFormat",
"StorageSystem",
"Task",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -333,6 +333,11 @@
"list_zones"
]
},
"RunTask": {
"methods": [
"run_task"
]
},
"UpdateAsset": {
"methods": [
"update_asset"
Expand Down Expand Up @@ -498,6 +503,11 @@
"list_zones"
]
},
"RunTask": {
"methods": [
"run_task"
]
},
"UpdateAsset": {
"methods": [
"update_asset"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1038,7 +1038,7 @@ async def sample_get_data_scan_job():
The request object. Get DataScanJob request.
name (:class:`str`):
Required. The resource name of the DataScanJob:
``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/dataScanJobs/{data_scan_job_id}``
``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}``
where ``project`` refers to a *project_id* or
*project_number* and ``location_id`` refers to a GCP
region.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1309,7 +1309,7 @@ def sample_get_data_scan_job():
The request object. Get DataScanJob request.
name (str):
Required. The resource name of the DataScanJob:
``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/dataScanJobs/{data_scan_job_id}``
``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}``
where ``project`` refers to a *project_id* or
*project_number* and ``location_id`` refers to a GCP
region.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3296,6 +3296,105 @@ async def sample_list_jobs():
# Done; return the response.
return response

async def run_task(
    self,
    request: Optional[Union[service.RunTaskRequest, dict]] = None,
    *,
    name: Optional[str] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, str]] = (),
) -> service.RunTaskResponse:
    r"""Run an on demand execution of a Task.

    .. code-block:: python

        # This snippet has been automatically generated and should be regarded as a
        # code template only.
        # It will require modifications to work:
        # - It may require correct/in-range values for request initialization.
        # - It may require specifying regional endpoints when creating the service
        #   client as shown in:
        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
        from google.cloud import dataplex_v1

        async def sample_run_task():
            # Create a client
            client = dataplex_v1.DataplexServiceAsyncClient()

            # Initialize request argument(s)
            request = dataplex_v1.RunTaskRequest(
                name="name_value",
            )

            # Make the request
            response = await client.run_task(request=request)

            # Handle the response
            print(response)

    Args:
        request (Optional[Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]]):
            The request object.
        name (:class:`str`):
            Required. The resource name of the task:
            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.

            This corresponds to the ``name`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.dataplex_v1.types.RunTaskResponse:
    """
    # A caller may pass either a fully-formed request object OR flattened
    # field arguments, never both; reject the ambiguous combination early.
    flattened = [name]
    if request is not None and any(flattened):
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Coerce dicts (or None) into a proper proto-plus request object.
    request = service.RunTaskRequest(request)

    # Copy any flattened arguments onto the request.
    if name is not None:
        request.name = name

    # The resource name is routed to the server via a gRPC metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
    )

    # Wrap the transport method so that retry/timeout policy and friendly
    # error handling are applied uniformly.
    wrapped_rpc = gapic_v1.method_async.wrap_method(
        self._client._transport.run_task,
        default_timeout=None,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # Issue the RPC and hand the deserialized response back to the caller.
    return await wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

async def get_job(
self,
request: Optional[Union[service.GetJobRequest, dict]] = None,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3579,6 +3579,105 @@ def sample_list_jobs():
# Done; return the response.
return response

def run_task(
    self,
    request: Optional[Union[service.RunTaskRequest, dict]] = None,
    *,
    name: Optional[str] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, str]] = (),
) -> service.RunTaskResponse:
    r"""Run an on demand execution of a Task.

    .. code-block:: python

        # This snippet has been automatically generated and should be regarded as a
        # code template only.
        # It will require modifications to work:
        # - It may require correct/in-range values for request initialization.
        # - It may require specifying regional endpoints when creating the service
        #   client as shown in:
        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
        from google.cloud import dataplex_v1

        def sample_run_task():
            # Create a client
            client = dataplex_v1.DataplexServiceClient()

            # Initialize request argument(s)
            request = dataplex_v1.RunTaskRequest(
                name="name_value",
            )

            # Make the request
            response = client.run_task(request=request)

            # Handle the response
            print(response)

    Args:
        request (Union[google.cloud.dataplex_v1.types.RunTaskRequest, dict]):
            The request object.
        name (str):
            Required. The resource name of the task:
            ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}``.

            This corresponds to the ``name`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.dataplex_v1.types.RunTaskResponse:
    """
    # A caller may pass either a fully-formed request object OR flattened
    # field arguments, never both; reject the ambiguous combination early.
    flattened = [name]
    if request is not None and any(flattened):
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Avoid a needless copy when the caller already supplied a proto-plus
    # request; at this point no flattened fields can be pending, so reuse
    # of the caller's object is safe.
    if not isinstance(request, service.RunTaskRequest):
        request = service.RunTaskRequest(request)

    # Copy any flattened arguments onto the request.
    if name is not None:
        request.name = name

    # The pre-wrapped transport method already carries retry/timeout policy
    # and friendly error handling.
    wrapped_rpc = self._transport._wrapped_methods[self._transport.run_task]

    # The resource name is routed to the server via a gRPC metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
    )

    # Issue the RPC and hand the deserialized response back to the caller.
    return wrapped_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

def get_job(
self,
request: Optional[Union[service.GetJobRequest, dict]] = None,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -355,6 +355,11 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
self.run_task: gapic_v1.method.wrap_method(
self.run_task,
default_timeout=None,
client_info=client_info,
),
self.get_job: gapic_v1.method.wrap_method(
self.get_job,
default_retry=retries.Retry(
Expand Down Expand Up @@ -648,6 +653,15 @@ def list_jobs(
]:
raise NotImplementedError()

@property
def run_task(
    self,
) -> Callable[
    [service.RunTaskRequest],
    Union[service.RunTaskResponse, Awaitable[service.RunTaskResponse]],
]:
    # Abstract hook for the RunTask RPC; concrete transports (e.g. the gRPC
    # transport) override this property with a real callable. The Union return
    # covers both sync and async transports.
    raise NotImplementedError()

@property
def get_job(
self,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -873,6 +873,30 @@ def list_jobs(
)
return self._stubs["list_jobs"]

@property
def run_task(self) -> Callable[[service.RunTaskRequest], service.RunTaskResponse]:
    r"""Return a callable for the run task method over gRPC.

    Run an on demand execution of a Task.

    Returns:
        Callable[[~.RunTaskRequest],
                ~.RunTaskResponse]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily build the stub on first access and memoize it; gRPC takes care
    # of request serialization and response deserialization, so only the
    # proto (de)serializers need to be supplied here.
    stub = self._stubs.get("run_task")
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            "/google.cloud.dataplex.v1.DataplexService/RunTask",
            request_serializer=service.RunTaskRequest.serialize,
            response_deserializer=service.RunTaskResponse.deserialize,
        )
        self._stubs["run_task"] = stub
    return stub

@property
def get_job(self) -> Callable[[service.GetJobRequest], tasks.Job]:
r"""Return a callable for the get job method over gRPC.
Expand Down
Loading

0 comments on commit 3f8f397

Please sign in to comment.