diff --git a/.kokoro/continuous/datacatalog.cfg b/.kokoro/continuous/datacatalog.cfg
new file mode 100644
index 000000000000..fc0371e5eb11
--- /dev/null
+++ b/.kokoro/continuous/datacatalog.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "datacatalog"
+}
diff --git a/.kokoro/presubmit/datacatalog.cfg b/.kokoro/presubmit/datacatalog.cfg
new file mode 100644
index 000000000000..fc0371e5eb11
--- /dev/null
+++ b/.kokoro/presubmit/datacatalog.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "datacatalog"
+}
diff --git a/.kokoro/release/datacatalog.cfg b/.kokoro/release/datacatalog.cfg
new file mode 100644
index 000000000000..fc0371e5eb11
--- /dev/null
+++ b/.kokoro/release/datacatalog.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Tell the trampoline which build file to use.
+env_vars: {
+ key: "PACKAGE"
+ value: "datacatalog"
+}
diff --git a/datacatalog/.coveragerc b/datacatalog/.coveragerc
new file mode 100644
index 000000000000..6b9ab9da4a1b
--- /dev/null
+++ b/datacatalog/.coveragerc
@@ -0,0 +1,18 @@
+[run]
+branch = True
+
+[report]
+fail_under = 100
+show_missing = True
+exclude_lines =
+ # Re-enable the standard pragma
+ pragma: NO COVER
+ # Ignore debug-only repr
+ def __repr__
+ # Ignore abstract methods
+ raise NotImplementedError
+omit =
+ */gapic/*.py
+ */proto/*.py
+ */core/*.py
+ */site-packages/*.py
\ No newline at end of file
diff --git a/datacatalog/.flake8 b/datacatalog/.flake8
new file mode 100644
index 000000000000..61766fa84d02
--- /dev/null
+++ b/datacatalog/.flake8
@@ -0,0 +1,13 @@
+[flake8]
+ignore = E203, E266, E501, W503
+exclude =
+ # Exclude generated code.
+ **/proto/**
+ **/gapic/**
+ *_pb2.py
+
+ # Standard linting exemptions.
+ __pycache__,
+ .git,
+ *.pyc,
+ conf.py
diff --git a/datacatalog/CHANGELOG.md b/datacatalog/CHANGELOG.md
new file mode 100644
index 000000000000..ac9f528f3e95
--- /dev/null
+++ b/datacatalog/CHANGELOG.md
@@ -0,0 +1,5 @@
+# Changelog
+
+[PyPI History][1]
+
+[1]: https://pypi.org/project/google-cloud-datacatalog/#history
diff --git a/datacatalog/LICENSE b/datacatalog/LICENSE
new file mode 100644
index 000000000000..a8ee855de2aa
--- /dev/null
+++ b/datacatalog/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/datacatalog/MANIFEST.in b/datacatalog/MANIFEST.in
new file mode 100644
index 000000000000..9cbf175afe6b
--- /dev/null
+++ b/datacatalog/MANIFEST.in
@@ -0,0 +1,5 @@
+include README.rst LICENSE
+recursive-include google *.json *.proto
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/datacatalog/README.rst b/datacatalog/README.rst
new file mode 100644
index 000000000000..4ee8e1547596
--- /dev/null
+++ b/datacatalog/README.rst
@@ -0,0 +1,79 @@
+Python Client for Google Cloud Data Catalog API (`Alpha`_)
+==========================================================
+
+`Google Cloud Data Catalog API`_: Google Cloud Data Catalog API provides features to attach metadata to
+Google Cloud Platform resources like BigQuery Tables. Key critical
+resources include:
+
+- Entries (Data Catalog representation of a cloud resource)
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. _Alpha: https://github.com/googleapis/google-cloud-python/blob/master/README.rst
+.. _Google Cloud Data Catalog API: https://cloud.google.com/data-catalog
+.. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/datacatalog/usage.html
+.. _Product Documentation: https://cloud.google.com/data-catalog
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Google Cloud Data Catalog API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Google Cloud Data Catalog API.: https://cloud.google.com/data-catalog
+.. _Setup Authentication.: https://googleapis.github.io/google-cloud-python/latest/core/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+ pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-datacatalog
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+ pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-datacatalog
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Google Cloud Data Catalog API
+  to see other available methods on the client.
+- Read the `Google Cloud Data Catalog API Product documentation`_ to learn
+ more about the product and see How-to Guides.
+- View this `repository’s main README`_ to see the full list of Cloud
+ APIs that we cover.
+
+.. _Google Cloud Data Catalog API Product documentation: https://cloud.google.com/data-catalog
+.. _repository’s main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst
\ No newline at end of file
diff --git a/datacatalog/docs/changelog.md b/datacatalog/docs/changelog.md
new file mode 120000
index 000000000000..04c99a55caae
--- /dev/null
+++ b/datacatalog/docs/changelog.md
@@ -0,0 +1 @@
+../CHANGELOG.md
\ No newline at end of file
diff --git a/datacatalog/docs/conf.py b/datacatalog/docs/conf.py
new file mode 100644
index 000000000000..4d17da66de50
--- /dev/null
+++ b/datacatalog/docs/conf.py
@@ -0,0 +1,359 @@
+# -*- coding: utf-8 -*-
+#
+# google-cloud-datacatalog documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+__version__ = "0.1.0"
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.6.3"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.autosummary",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.coverage",
+ "sphinx.ext.napoleon",
+ "sphinx.ext.todo",
+ "sphinx.ext.viewcode",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_flags = ["members"]
+autosummary_generate = True
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-datacatalog"
+copyright = u"2017, Google"
+author = u"Google APIs"
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+ "description": "Google Cloud Client Libraries for Python",
+ "github_user": "googleapis",
+ "github_repo": "google-cloud-python",
+ "github_banner": True,
+ "font_family": "'Roboto', Georgia, sans",
+ "head_font_family": "'Roboto', Georgia, serif",
+ "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = []
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-datacatalog-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+suppress_warnings = [
+ # Temporarily suppress this to avoid "more than one target found for
+ # cross-reference" warning, which are intractable for us to avoid while in
+ # a mono-repo.
+ # See https://github.com/sphinx-doc/sphinx/blob
+ # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+ "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ #'papersize': 'letterpaper',
+ # The font size ('10pt', '11pt' or '12pt').
+ #'pointsize': '10pt',
+ # Additional stuff for the LaTeX preamble.
+ #'preamble': '',
+ # Latex figure (float) alignment
+ #'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (
+ master_doc,
+ "google-cloud-datacatalog.tex",
+ u"google-cloud-datacatalog Documentation",
+ author,
+ "manual",
+ )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (
+ master_doc,
+ "google-cloud-datacatalog",
+ u"google-cloud-datacatalog Documentation",
+ [author],
+ 1,
+ )
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (
+ master_doc,
+ "google-cloud-datacatalog",
+ u"google-cloud-datacatalog Documentation",
+ author,
+ "google-cloud-datacatalog",
+ "GAPIC library for the {metadata.shortName} v1beta1 service",
+ "APIs",
+ )
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+ "python": ("http://python.readthedocs.org/en/latest/", None),
+ "gax": ("https://gax-python.readthedocs.org/en/latest/", None),
+ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
+ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None),
+ "google.api_core": (
+ "https://googleapis.github.io/google-cloud-python/latest",
+ None,
+ ),
+ "grpc": ("https://grpc.io/grpc/python/", None),
+ "requests": ("http://docs.python-requests.org/en/master/", None),
+ "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None),
+ "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
+}
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/datacatalog/docs/gapic/v1beta1/api.rst b/datacatalog/docs/gapic/v1beta1/api.rst
new file mode 100644
index 000000000000..4c56460c09d7
--- /dev/null
+++ b/datacatalog/docs/gapic/v1beta1/api.rst
@@ -0,0 +1,6 @@
+Client for Google Cloud Data Catalog API
+========================================
+
+.. automodule:: google.cloud.datacatalog_v1beta1
+ :members:
+ :inherited-members:
\ No newline at end of file
diff --git a/datacatalog/docs/gapic/v1beta1/types.rst b/datacatalog/docs/gapic/v1beta1/types.rst
new file mode 100644
index 000000000000..bcc6cefbd1f7
--- /dev/null
+++ b/datacatalog/docs/gapic/v1beta1/types.rst
@@ -0,0 +1,5 @@
+Types for Google Cloud Data Catalog API Client
+==============================================
+
+.. automodule:: google.cloud.datacatalog_v1beta1.types
+ :members:
\ No newline at end of file
diff --git a/datacatalog/docs/index.rst b/datacatalog/docs/index.rst
new file mode 100644
index 000000000000..66507f268314
--- /dev/null
+++ b/datacatalog/docs/index.rst
@@ -0,0 +1,22 @@
+.. include:: /../datacatalog/README.rst
+
+
+API Reference
+-------------
+
+.. toctree::
+ :maxdepth: 2
+
+ gapic/v1beta1/api
+ gapic/v1beta1/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-datacatalog`` releases:
+
+.. toctree::
+ :maxdepth: 2
+
+ changelog
diff --git a/datacatalog/google/__init__.py b/datacatalog/google/__init__.py
new file mode 100644
index 000000000000..8fcc60e2b9c6
--- /dev/null
+++ b/datacatalog/google/__init__.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+ import pkg_resources
+
+ pkg_resources.declare_namespace(__name__)
+except ImportError:
+ import pkgutil
+
+ __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/datacatalog/google/cloud/__init__.py b/datacatalog/google/cloud/__init__.py
new file mode 100644
index 000000000000..8fcc60e2b9c6
--- /dev/null
+++ b/datacatalog/google/cloud/__init__.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+ import pkg_resources
+
+ pkg_resources.declare_namespace(__name__)
+except ImportError:
+ import pkgutil
+
+ __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/datacatalog/google/cloud/datacatalog.py b/datacatalog/google/cloud/datacatalog.py
new file mode 100644
index 000000000000..4d18ae13f8ec
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+
+from google.cloud.datacatalog_v1beta1 import DataCatalogClient
+from google.cloud.datacatalog_v1beta1 import enums
+from google.cloud.datacatalog_v1beta1 import types
+
+__all__ = ("enums", "types", "DataCatalogClient")
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/__init__.py b/datacatalog/google/cloud/datacatalog_v1beta1/__init__.py
new file mode 100644
index 000000000000..b025cf9d805e
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/__init__.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+
+from google.cloud.datacatalog_v1beta1 import types
+from google.cloud.datacatalog_v1beta1.gapic import data_catalog_client
+from google.cloud.datacatalog_v1beta1.gapic import enums
+
+
# Re-export the GAPIC-generated client at the versioned package level.
# The docstring is copied from the generated class (so a plain docstring
# cannot be used here), and the ``enums`` module is attached for convenient
# access as ``DataCatalogClient.enums``.
class DataCatalogClient(data_catalog_client.DataCatalogClient):
    __doc__ = data_catalog_client.DataCatalogClient.__doc__
    enums = enums
+
+
+__all__ = ("enums", "types", "DataCatalogClient")
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/__init__.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py
new file mode 100644
index 000000000000..b82775ec8730
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py
@@ -0,0 +1,272 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Accesses the google.cloud.datacatalog.v1beta1 DataCatalog API."""
+
+import pkg_resources
+import warnings
+
+from google.oauth2 import service_account
+import google.api_core.gapic_v1.client_info
+import google.api_core.gapic_v1.config
+import google.api_core.gapic_v1.method
+import google.api_core.grpc_helpers
+import google.api_core.path_template
+import google.api_core.protobuf_helpers
+import grpc
+
+from google.cloud.datacatalog_v1beta1.gapic import data_catalog_client_config
+from google.cloud.datacatalog_v1beta1.gapic import enums
+from google.cloud.datacatalog_v1beta1.gapic.transports import (
+ data_catalog_grpc_transport,
+)
+from google.cloud.datacatalog_v1beta1.proto import datacatalog_pb2
+from google.cloud.datacatalog_v1beta1.proto import datacatalog_pb2_grpc
+
# The installed version of this library; attached to ``ClientInfo`` in
# ``DataCatalogClient.__init__`` so it is reported in the user-agent sent
# with API requests.
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
    "google-cloud-datacatalog"
).version
+
+
class DataCatalogClient(object):
    """
    Cloud Data Catalog is a service that allows clients to discover,
    manage, and understand their Google Cloud data resources.
    """

    SERVICE_ADDRESS = "datacatalog.googleapis.com:443"
    """The default address of the service."""

    # The name of the interface for this client. This is the key used to
    # find the method configuration in the client_config dictionary.
    _INTERFACE_NAME = "google.cloud.datacatalog.v1beta1.DataCatalog"

    @classmethod
    def from_service_account_file(cls, filename, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            DataCatalogClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    # Alias: JSON service-account key files go through the same loader.
    from_service_account_json = from_service_account_file

    @classmethod
    def location_path(cls, project, location):
        """Return a fully-qualified location string."""
        return google.api_core.path_template.expand(
            "projects/{project}/locations/{location}",
            project=project,
            location=location,
        )

    @classmethod
    def entry_path(cls, project, location, entry_group, entry):
        """Return a fully-qualified entry string."""
        return google.api_core.path_template.expand(
            "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}",
            project=project,
            location=location,
            entry_group=entry_group,
            entry=entry,
        )

    def __init__(
        self,
        transport=None,
        channel=None,
        credentials=None,
        client_config=None,
        client_info=None,
    ):
        """Constructor.

        Args:
            transport (Union[~.DataCatalogGrpcTransport,
                Callable[[~.Credentials, type], ~.DataCatalogGrpcTransport]): A transport
                instance, responsible for actually making the API calls.
                The default transport uses the gRPC protocol.
                This argument may also be a callable which returns a
                transport instance. Callables will be sent the credentials
                as the first argument and the default transport class as
                the second argument.
            channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
                through which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is mutually exclusive with providing a
                transport instance to ``transport``; doing so will raise
                an exception.
            client_config (dict): DEPRECATED. A dictionary of call options for
                each method. If not specified, the default configuration is used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Raise deprecation warnings for things we want to go away.
        if client_config is not None:
            warnings.warn(
                "The `client_config` argument is deprecated.",
                PendingDeprecationWarning,
                stacklevel=2,
            )
        else:
            client_config = data_catalog_client_config.config

        if channel:
            warnings.warn(
                "The `channel` argument is deprecated; use " "`transport` instead.",
                PendingDeprecationWarning,
                stacklevel=2,
            )

        # Instantiate the transport.
        # The transport is responsible for handling serialization and
        # deserialization and actually sending data to the service.
        if transport:
            if callable(transport):
                self.transport = transport(
                    credentials=credentials,
                    default_class=data_catalog_grpc_transport.DataCatalogGrpcTransport,
                )
            else:
                # A concrete transport instance carries its own credentials;
                # passing both is ambiguous, so reject it.
                if credentials:
                    raise ValueError(
                        "Received both a transport instance and "
                        "credentials; these are mutually exclusive."
                    )
                self.transport = transport
        else:
            self.transport = data_catalog_grpc_transport.DataCatalogGrpcTransport(
                address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials
            )

        # Ensure the library version is always reported (via the user-agent),
        # whether or not the caller supplied a ClientInfo.
        if client_info is None:
            client_info = google.api_core.gapic_v1.client_info.ClientInfo(
                gapic_version=_GAPIC_LIBRARY_VERSION
            )
        else:
            client_info.gapic_version = _GAPIC_LIBRARY_VERSION
        self._client_info = client_info

        # Parse out the default settings for retry and timeout for each RPC
        # from the client configuration.
        # (Ordinarily, these are the defaults specified in the `*_config.py`
        # file next to this one.)
        self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
            client_config["interfaces"][self._INTERFACE_NAME]
        )

        # Save a dictionary of cached API call functions.
        # These are the actual callables which invoke the proper
        # transport methods, wrapped with `wrap_method` to add retry,
        # timeout, and the like.
        self._inner_api_calls = {}

    # Service calls
    def lookup_entry(
        self,
        linked_resource=None,
        sql_resource=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Get an entry by target resource name. This method allows clients to use
        the resource name from the source Google Cloud Platform service to get the
        Cloud Data Catalog Entry.

        Example:
            >>> from google.cloud import datacatalog_v1beta1
            >>>
            >>> client = datacatalog_v1beta1.DataCatalogClient()
            >>>
            >>> response = client.lookup_entry()

        Args:
            linked_resource (str): The full name of the Google Cloud Platform resource the Data Catalog
                entry represents. See:
                https://cloud.google.com/apis/design/resource\_names#full\_resource\_name
                Full names are case-sensitive.

                Examples:
                "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId".
                "//pubsub.googleapis.com/projects/projectId/topics/topicId"
            sql_resource (str): The SQL name of the entry. SQL names are case-sensitive.

                Examples:

                1. cloud\_pubsub.project\_id.topic\_id
                2. bigquery.project\_id.dataset\_id.table\_id
                3. datacatalog.project\_id.location\_id.entry\_group\_id.entry\_id
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will not
                be retried.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.datacatalog_v1beta1.types.Entry` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # The wrapped callable is cached so the wrapping cost is paid once.
        if "lookup_entry" not in self._inner_api_calls:
            self._inner_api_calls[
                "lookup_entry"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.lookup_entry,
                default_retry=self._method_configs["LookupEntry"].retry,
                default_timeout=self._method_configs["LookupEntry"].timeout,
                client_info=self._client_info,
            )

        # Sanity check: We have some fields which are mutually exclusive;
        # raise ValueError if more than one is sent.
        google.api_core.protobuf_helpers.check_oneof(
            linked_resource=linked_resource, sql_resource=sql_resource
        )

        request = datacatalog_pb2.LookupEntryRequest(
            linked_resource=linked_resource, sql_resource=sql_resource
        )
        return self._inner_api_calls["lookup_entry"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py
new file mode 100644
index 000000000000..9d706ea6d15d
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py
@@ -0,0 +1,28 @@
# Default retry/timeout configuration for the DataCatalog GAPIC client.
# ``DataCatalogClient.__init__`` feeds this structure to
# ``google.api_core.gapic_v1.config.parse_method_configs`` to derive the
# per-RPC retry and timeout defaults.
config = {
    "interfaces": {
        "google.cloud.datacatalog.v1beta1.DataCatalog": {
            # Named groups of gRPC status codes that are considered retryable.
            "retry_codes": {
                "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
                "non_idempotent": [],
            },
            # Exponential-backoff parameters; all durations are milliseconds.
            "retry_params": {
                "default": {
                    "initial_retry_delay_millis": 100,
                    "retry_delay_multiplier": 1.3,
                    "max_retry_delay_millis": 60000,
                    "initial_rpc_timeout_millis": 20000,
                    "rpc_timeout_multiplier": 1.0,
                    "max_rpc_timeout_millis": 20000,
                    "total_timeout_millis": 600000,
                }
            },
            # Per-method defaults, referencing the named groups above.
            "methods": {
                "LookupEntry": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default",
                }
            },
        }
    }
}
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py
new file mode 100644
index 000000000000..d2b50cec9297
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Wrappers for protocol buffer enum types."""
+
+import enum
+
+
class EntryType(enum.IntEnum):
    """Describes the kind of resource an entry represents.

    Entry resources in Cloud Data Catalog can be of different types; for
    example, a BigQuery table entry is of type ``TABLE``. This enum names
    every possible type Cloud Data Catalog contains.

    Attributes:
        ENTRY_TYPE_UNSPECIFIED (int): Default unknown type.
        TABLE (int): An entry that has a GoogleSQL schema, including logical views.
        DATA_STREAM (int): A streaming entry, for example a Pub/Sub topic.
    """

    ENTRY_TYPE_UNSPECIFIED = 0
    TABLE = 2
    DATA_STREAM = 3
+
+
class TableSourceType(enum.IntEnum):
    """Identifies the system a table entry originates from.

    Attributes:
        TABLE_SOURCE_TYPE_UNSPECIFIED (int): Default unknown type.
        BIGQUERY_VIEW (int): A table view.
        BIGQUERY_TABLE (int): A BigQuery native table.
    """

    TABLE_SOURCE_TYPE_UNSPECIFIED = 0
    BIGQUERY_VIEW = 2
    BIGQUERY_TABLE = 5
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/__init__.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py
new file mode 100644
index 000000000000..9b0642b451b2
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py
@@ -0,0 +1,111 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import google.api_core.grpc_helpers
+
+from google.cloud.datacatalog_v1beta1.proto import datacatalog_pb2_grpc
+
+
class DataCatalogGrpcTransport(object):
    """gRPC transport class providing stubs for
    google.cloud.datacatalog.v1beta1 DataCatalog API.

    The transport provides access to the raw gRPC stubs,
    which can be used to take advantage of advanced
    features of gRPC.
    """

    # The scopes needed to make gRPC calls to all of the methods defined
    # in this service.
    _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    def __init__(
        self, channel=None, credentials=None, address="datacatalog.googleapis.com:443"
    ):
        """Instantiate the transport class.

        Args:
            channel (grpc.Channel): A ``Channel`` instance through
                which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            address (str): The address where the service is hosted.
        """
        # If both `channel` and `credentials` are specified, raise an
        # exception (channels come with credentials baked in already).
        if channel is not None and credentials is not None:
            raise ValueError(
                "The `channel` and `credentials` arguments are mutually " "exclusive."
            )

        # Create the channel.
        if channel is None:
            channel = self.create_channel(address=address, credentials=credentials)

        self._channel = channel

        # gRPC uses objects called "stubs" that are bound to the
        # channel and provide a basic method for each RPC.
        self._stubs = {
            "data_catalog_stub": datacatalog_pb2_grpc.DataCatalogStub(channel)
        }

    @classmethod
    def create_channel(cls, address="datacatalog.googleapis.com:443", credentials=None):
        """Create and return a gRPC channel object.

        Args:
            address (str): The host for the channel to use.
            credentials (~.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        # The cloud-platform scope covers every method on this service.
        return google.api_core.grpc_helpers.create_channel(
            address, credentials=credentials, scopes=cls._OAUTH_SCOPES
        )

    @property
    def channel(self):
        """The gRPC channel used by the transport.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return self._channel

    @property
    def lookup_entry(self):
        """Return the gRPC stub for :meth:`DataCatalogClient.lookup_entry`.

        Get an entry by target resource name. This method allows clients to use
        the resource name from the source Google Cloud Platform service to get the
        Cloud Data Catalog Entry.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs["data_catalog_stub"].LookupEntry
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/__init__.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py
new file mode 100644
index 000000000000..94c6758bad4a
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py
@@ -0,0 +1,472 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/datacatalog_v1beta1/proto/datacatalog.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from google.cloud.datacatalog_v1beta1.proto import (
+ schema_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_schema__pb2,
+)
+from google.cloud.datacatalog_v1beta1.proto import (
+ table_spec_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_table__spec__pb2,
+)
+from google.cloud.datacatalog_v1beta1.proto import (
+ timestamps_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2,
+)
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/datacatalog_v1beta1/proto/datacatalog.proto",
+ package="google.cloud.datacatalog.v1beta1",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001"
+ ),
+ serialized_pb=_b(
+ '\n8google/cloud/datacatalog_v1beta1/proto/datacatalog.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x33google/cloud/datacatalog_v1beta1/proto/schema.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/timestamps.proto"V\n\x12LookupEntryRequest\x12\x19\n\x0flinked_resource\x18\x01 \x01(\tH\x00\x12\x16\n\x0csql_resource\x18\x03 \x01(\tH\x00\x42\r\n\x0btarget_name"\x85\x03\n\x05\x45ntry\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0flinked_resource\x18\t \x01(\t\x12\x39\n\x04type\x18\x02 \x01(\x0e\x32+.google.cloud.datacatalog.v1beta1.EntryType\x12R\n\x13\x62igquery_table_spec\x18\x0c \x01(\x0b\x32\x33.google.cloud.datacatalog.v1beta1.BigQueryTableSpecH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x38\n\x06schema\x18\x05 \x01(\x0b\x32(.google.cloud.datacatalog.v1beta1.Schema\x12T\n\x18source_system_timestamps\x18\x07 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.SystemTimestampsB\x0b\n\ttype_spec*C\n\tEntryType\x12\x1a\n\x16\x45NTRY_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05TABLE\x10\x02\x12\x0f\n\x0b\x44\x41TA_STREAM\x10\x03\x32\x9d\x01\n\x0b\x44\x61taCatalog\x12\x8d\x01\n\x0bLookupEntry\x12\x34.google.cloud.datacatalog.v1beta1.LookupEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"\x1f\x82\xd3\xe4\x93\x02\x19\x12\x17/v1beta1/entries:lookupBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3'
+ ),
+ dependencies=[
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_schema__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_table__spec__pb2.DESCRIPTOR,
+ google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2.DESCRIPTOR,
+ ],
+)
+
+_ENTRYTYPE = _descriptor.EnumDescriptor(
+ name="EntryType",
+ full_name="google.cloud.datacatalog.v1beta1.EntryType",
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="ENTRY_TYPE_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="TABLE", index=1, number=2, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="DATA_STREAM", index=2, number=3, serialized_options=None, type=None
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=771,
+ serialized_end=838,
+)
+_sym_db.RegisterEnumDescriptor(_ENTRYTYPE)
+
+EntryType = enum_type_wrapper.EnumTypeWrapper(_ENTRYTYPE)
+ENTRY_TYPE_UNSPECIFIED = 0
+TABLE = 2
+DATA_STREAM = 3
+
+
+_LOOKUPENTRYREQUEST = _descriptor.Descriptor(
+ name="LookupEntryRequest",
+ full_name="google.cloud.datacatalog.v1beta1.LookupEntryRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="linked_resource",
+ full_name="google.cloud.datacatalog.v1beta1.LookupEntryRequest.linked_resource",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="sql_resource",
+ full_name="google.cloud.datacatalog.v1beta1.LookupEntryRequest.sql_resource",
+ index=1,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="target_name",
+ full_name="google.cloud.datacatalog.v1beta1.LookupEntryRequest.target_name",
+ index=0,
+ containing_type=None,
+ fields=[],
+ )
+ ],
+ serialized_start=291,
+ serialized_end=377,
+)
+
+
+_ENTRY = _descriptor.Descriptor(
+ name="Entry",
+ full_name="google.cloud.datacatalog.v1beta1.Entry",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.cloud.datacatalog.v1beta1.Entry.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="linked_resource",
+ full_name="google.cloud.datacatalog.v1beta1.Entry.linked_resource",
+ index=1,
+ number=9,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="type",
+ full_name="google.cloud.datacatalog.v1beta1.Entry.type",
+ index=2,
+ number=2,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="bigquery_table_spec",
+ full_name="google.cloud.datacatalog.v1beta1.Entry.bigquery_table_spec",
+ index=3,
+ number=12,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="display_name",
+ full_name="google.cloud.datacatalog.v1beta1.Entry.display_name",
+ index=4,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="description",
+ full_name="google.cloud.datacatalog.v1beta1.Entry.description",
+ index=5,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="schema",
+ full_name="google.cloud.datacatalog.v1beta1.Entry.schema",
+ index=6,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="source_system_timestamps",
+ full_name="google.cloud.datacatalog.v1beta1.Entry.source_system_timestamps",
+ index=7,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="type_spec",
+ full_name="google.cloud.datacatalog.v1beta1.Entry.type_spec",
+ index=0,
+ containing_type=None,
+ fields=[],
+ )
+ ],
+ serialized_start=380,
+ serialized_end=769,
+)
+
+_LOOKUPENTRYREQUEST.oneofs_by_name["target_name"].fields.append(
+ _LOOKUPENTRYREQUEST.fields_by_name["linked_resource"]
+)
+_LOOKUPENTRYREQUEST.fields_by_name[
+ "linked_resource"
+].containing_oneof = _LOOKUPENTRYREQUEST.oneofs_by_name["target_name"]
+_LOOKUPENTRYREQUEST.oneofs_by_name["target_name"].fields.append(
+ _LOOKUPENTRYREQUEST.fields_by_name["sql_resource"]
+)
+_LOOKUPENTRYREQUEST.fields_by_name[
+ "sql_resource"
+].containing_oneof = _LOOKUPENTRYREQUEST.oneofs_by_name["target_name"]
+_ENTRY.fields_by_name["type"].enum_type = _ENTRYTYPE
+_ENTRY.fields_by_name[
+ "bigquery_table_spec"
+].message_type = (
+ google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_table__spec__pb2._BIGQUERYTABLESPEC
+)
+_ENTRY.fields_by_name[
+ "schema"
+].message_type = (
+ google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_schema__pb2._SCHEMA
+)
+_ENTRY.fields_by_name[
+ "source_system_timestamps"
+].message_type = (
+ google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2._SYSTEMTIMESTAMPS
+)
+_ENTRY.oneofs_by_name["type_spec"].fields.append(
+ _ENTRY.fields_by_name["bigquery_table_spec"]
+)
+_ENTRY.fields_by_name["bigquery_table_spec"].containing_oneof = _ENTRY.oneofs_by_name[
+ "type_spec"
+]
+DESCRIPTOR.message_types_by_name["LookupEntryRequest"] = _LOOKUPENTRYREQUEST
+DESCRIPTOR.message_types_by_name["Entry"] = _ENTRY
+DESCRIPTOR.enum_types_by_name["EntryType"] = _ENTRYTYPE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+LookupEntryRequest = _reflection.GeneratedProtocolMessageType(
+ "LookupEntryRequest",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_LOOKUPENTRYREQUEST,
+ __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2",
+ __doc__="""Request message for
+ [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry].
+
+
+ Attributes:
+ target_name:
+ Represents either the Google Cloud Platform resource or SQL
+ name for a Google Cloud Platform resource.
+ linked_resource:
+ The full name of the Google Cloud Platform resource the Data
+ Catalog entry represents. See: https://cloud.google.com/apis/d
+ esign/resource\_names#full\_resource\_name Full names are
+ case-sensitive. Examples: "//bigquery.googleapis.com/projects
+ /projectId/datasets/datasetId/tables/tableId".
+ "//pubsub.googleapis.com/projects/projectId/topics/topicId"
+ sql_resource:
+ The SQL name of the entry. SQL names are case-sensitive.
+ Examples: 1. cloud\_pubsub.project\_id.topic\_id 2.
+ bigquery.project\_id.dataset\_id.table\_id 3. datacatalog.proj
+ ect\_id.location\_id.entry\_group\_id.entry\_id
+ """,
+ # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.LookupEntryRequest)
+ ),
+)
+_sym_db.RegisterMessage(LookupEntryRequest)
+
+Entry = _reflection.GeneratedProtocolMessageType(
+ "Entry",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_ENTRY,
+ __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2",
+ __doc__="""Entry Metadata. A Data Catalog Entry resource represents another
+ resource in Google Cloud Platform, such as a BigQuery Dataset or a
+ Pub/Sub Topic. Clients can use the ``linked_resource`` field in the
+ Entry resource to refer to the original resource id of the source
+ system.
+
+ An Entry resource contains resource details, such as its schema.
+
+
+ Attributes:
+ name:
+ Output only. The Data Catalog resource name of the entry in
+ URL format. For example, "projects/{project\_id}/locations/{lo
+ cation}/entryGroups/{entry\_group\_id}/entries/{entry\_id}".
+ linked_resource:
+ The full name of the cloud resource the entry belongs to. See:
+ https://cloud.google.com/apis/design/resource\_names#full\_res
+ ource\_name Data Catalog supports resources from select
+ Google Cloud Platform systems. ``linked_resource`` is the full
+ name of the Google Cloud Platform resource. For example, the
+ ``linked_resource`` for a table resource from BigQuery is: "/
+ /bigquery.googleapis.com/projects/projectId/datasets/datasetId
+ /tables/tableId".
+ type:
+ Type of entry.
+ type_spec:
+ Type specification information.
+ bigquery_table_spec:
+ Specification that applies to a BigQuery table. This is only
+ valid on entries of type TABLE.
+ display_name:
+ Display information such as title and description. A short
+ name to identify the entry, for example, "Analytics Data - Jan
+ 2011".
+ description:
+ Entry description, which can consist of several sentences or
+ paragraphs that describe entry contents.
+ schema:
+ Schema of the entry.
+ source_system_timestamps:
+ Timestamps about the underlying Google Cloud Platform resource
+ -- not about this Data Catalog Entry.
+ """,
+ # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.Entry)
+ ),
+)
+_sym_db.RegisterMessage(Entry)
+
+
+DESCRIPTOR._options = None
+
+_DATACATALOG = _descriptor.ServiceDescriptor(
+ name="DataCatalog",
+ full_name="google.cloud.datacatalog.v1beta1.DataCatalog",
+ file=DESCRIPTOR,
+ index=0,
+ serialized_options=None,
+ serialized_start=841,
+ serialized_end=998,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="LookupEntry",
+ full_name="google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry",
+ index=0,
+ containing_service=None,
+ input_type=_LOOKUPENTRYREQUEST,
+ output_type=_ENTRY,
+ serialized_options=_b(
+ "\202\323\344\223\002\031\022\027/v1beta1/entries:lookup"
+ ),
+ )
+ ],
+)
+_sym_db.RegisterServiceDescriptor(_DATACATALOG)
+
+DESCRIPTOR.services_by_name["DataCatalog"] = _DATACATALOG
+
+# @@protoc_insertion_point(module_scope)
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py
new file mode 100644
index 000000000000..e4a572c9b6c5
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py
@@ -0,0 +1,53 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
+
+from google.cloud.datacatalog_v1beta1.proto import (
+ datacatalog_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2,
+)
+
+
+class DataCatalogStub(object):
+ """Cloud Data Catalog is a service that allows clients to discover,
+ manage, and understand their Google Cloud data resources.
+ """
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.LookupEntry = channel.unary_unary(
+ "/google.cloud.datacatalog.v1beta1.DataCatalog/LookupEntry",
+ request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.LookupEntryRequest.SerializeToString,
+ response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.FromString,
+ )
+
+
+class DataCatalogServicer(object):
+ """Cloud Data Catalog is a service that allows clients to discover,
+ manage, and understand their Google Cloud data resources.
+ """
+
+ def LookupEntry(self, request, context):
+ """Get an entry by target resource name. This method allows clients to use
+ the resource name from the source Google Cloud Platform service to get the
+ Cloud Data Catalog Entry.
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
+
+
+def add_DataCatalogServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ "LookupEntry": grpc.unary_unary_rpc_method_handler(
+ servicer.LookupEntry,
+ request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.LookupEntryRequest.FromString,
+ response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.SerializeToString,
+ )
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.cloud.datacatalog.v1beta1.DataCatalog", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py
new file mode 100644
index 000000000000..569639284008
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py
@@ -0,0 +1,235 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/datacatalog_v1beta1/proto/schema.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/datacatalog_v1beta1/proto/schema.proto",
+ package="google.cloud.datacatalog.v1beta1",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001"
+ ),
+ serialized_pb=_b(
+ '\n3google/cloud/datacatalog_v1beta1/proto/schema.proto\x12 google.cloud.datacatalog.v1beta1"I\n\x06Schema\x12?\n\x07\x63olumns\x18\x02 \x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchema"\x93\x01\n\x0c\x43olumnSchema\x12\x0e\n\x06\x63olumn\x18\x06 \x01(\t\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04mode\x18\x03 \x01(\t\x12\x42\n\nsubcolumns\x18\x07 \x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchemaBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3'
+ ),
+)
+
+
+_SCHEMA = _descriptor.Descriptor(
+ name="Schema",
+ full_name="google.cloud.datacatalog.v1beta1.Schema",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="columns",
+ full_name="google.cloud.datacatalog.v1beta1.Schema.columns",
+ index=0,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=89,
+ serialized_end=162,
+)
+
+
+_COLUMNSCHEMA = _descriptor.Descriptor(
+ name="ColumnSchema",
+ full_name="google.cloud.datacatalog.v1beta1.ColumnSchema",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="column",
+ full_name="google.cloud.datacatalog.v1beta1.ColumnSchema.column",
+ index=0,
+ number=6,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="type",
+ full_name="google.cloud.datacatalog.v1beta1.ColumnSchema.type",
+ index=1,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="description",
+ full_name="google.cloud.datacatalog.v1beta1.ColumnSchema.description",
+ index=2,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="mode",
+ full_name="google.cloud.datacatalog.v1beta1.ColumnSchema.mode",
+ index=3,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="subcolumns",
+ full_name="google.cloud.datacatalog.v1beta1.ColumnSchema.subcolumns",
+ index=4,
+ number=7,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=165,
+ serialized_end=312,
+)
+
+_SCHEMA.fields_by_name["columns"].message_type = _COLUMNSCHEMA
+_COLUMNSCHEMA.fields_by_name["subcolumns"].message_type = _COLUMNSCHEMA
+DESCRIPTOR.message_types_by_name["Schema"] = _SCHEMA
+DESCRIPTOR.message_types_by_name["ColumnSchema"] = _COLUMNSCHEMA
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Schema = _reflection.GeneratedProtocolMessageType(
+ "Schema",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SCHEMA,
+ __module__="google.cloud.datacatalog_v1beta1.proto.schema_pb2",
+ __doc__="""Represents a schema (e.g. BigQuery, GoogleSQL, Avro schema).
+
+
+ Attributes:
+ columns:
+ Schema of columns. A maximum of 10,000 columns and sub-columns
+ can be specified.
+ """,
+ # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.Schema)
+ ),
+)
+_sym_db.RegisterMessage(Schema)
+
+ColumnSchema = _reflection.GeneratedProtocolMessageType(
+ "ColumnSchema",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_COLUMNSCHEMA,
+ __module__="google.cloud.datacatalog_v1beta1.proto.schema_pb2",
+ __doc__="""Representation of a column within a schema. Columns could be nested
+ inside other columns.
+
+
+ Attributes:
+ column:
+ Required. Name of the column.
+ type:
+ Required. Type of the column.
+ description:
+ Description of the column.
+ mode:
+ A column's mode indicates whether the values in this column
+ are required, nullable, etc. Only 'NULLABLE', 'REQUIRED' and
+ 'REPEATED' are supported, default mode is 'NULLABLE'.
+ subcolumns:
+ Schema of sub-columns.
+ """,
+ # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.ColumnSchema)
+ ),
+)
+_sym_db.RegisterMessage(ColumnSchema)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2_grpc.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2_grpc.py
new file mode 100644
index 000000000000..07cb78fe03a9
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2_grpc.py
@@ -0,0 +1,2 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py
new file mode 100644
index 000000000000..5404c4746a53
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py
@@ -0,0 +1,206 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/datacatalog_v1beta1/proto/table_spec.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/datacatalog_v1beta1/proto/table_spec.proto",
+ package="google.cloud.datacatalog.v1beta1",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001"
+ ),
+ serialized_pb=_b(
+ '\n7google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x12 google.cloud.datacatalog.v1beta1"\xa0\x01\n\x11\x42igQueryTableSpec\x12L\n\x11table_source_type\x18\x01 \x01(\x0e\x32\x31.google.cloud.datacatalog.v1beta1.TableSourceType\x12=\n\tview_spec\x18\x02 \x01(\x0b\x32*.google.cloud.datacatalog.v1beta1.ViewSpec"\x1e\n\x08ViewSpec\x12\x12\n\nview_query\x18\x01 \x01(\t*[\n\x0fTableSourceType\x12!\n\x1dTABLE_SOURCE_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rBIGQUERY_VIEW\x10\x02\x12\x12\n\x0e\x42IGQUERY_TABLE\x10\x05\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3'
+ ),
+)
+
+_TABLESOURCETYPE = _descriptor.EnumDescriptor(
+ name="TableSourceType",
+ full_name="google.cloud.datacatalog.v1beta1.TableSourceType",
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="TABLE_SOURCE_TYPE_UNSPECIFIED",
+ index=0,
+ number=0,
+ serialized_options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="BIGQUERY_VIEW", index=1, number=2, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="BIGQUERY_TABLE", index=2, number=5, serialized_options=None, type=None
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=288,
+ serialized_end=379,
+)
+_sym_db.RegisterEnumDescriptor(_TABLESOURCETYPE)
+
+TableSourceType = enum_type_wrapper.EnumTypeWrapper(_TABLESOURCETYPE)
+TABLE_SOURCE_TYPE_UNSPECIFIED = 0
+BIGQUERY_VIEW = 2
+BIGQUERY_TABLE = 5
+
+
+_BIGQUERYTABLESPEC = _descriptor.Descriptor(
+ name="BigQueryTableSpec",
+ full_name="google.cloud.datacatalog.v1beta1.BigQueryTableSpec",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="table_source_type",
+ full_name="google.cloud.datacatalog.v1beta1.BigQueryTableSpec.table_source_type",
+ index=0,
+ number=1,
+ type=14,
+ cpp_type=8,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="view_spec",
+ full_name="google.cloud.datacatalog.v1beta1.BigQueryTableSpec.view_spec",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=94,
+ serialized_end=254,
+)
+
+
+_VIEWSPEC = _descriptor.Descriptor(
+ name="ViewSpec",
+ full_name="google.cloud.datacatalog.v1beta1.ViewSpec",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="view_query",
+ full_name="google.cloud.datacatalog.v1beta1.ViewSpec.view_query",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ )
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=256,
+ serialized_end=286,
+)
+
+_BIGQUERYTABLESPEC.fields_by_name["table_source_type"].enum_type = _TABLESOURCETYPE
+_BIGQUERYTABLESPEC.fields_by_name["view_spec"].message_type = _VIEWSPEC
+DESCRIPTOR.message_types_by_name["BigQueryTableSpec"] = _BIGQUERYTABLESPEC
+DESCRIPTOR.message_types_by_name["ViewSpec"] = _VIEWSPEC
+DESCRIPTOR.enum_types_by_name["TableSourceType"] = _TABLESOURCETYPE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+BigQueryTableSpec = _reflection.GeneratedProtocolMessageType(
+ "BigQueryTableSpec",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_BIGQUERYTABLESPEC,
+ __module__="google.cloud.datacatalog_v1beta1.proto.table_spec_pb2",
+ __doc__="""Describes a BigQuery table.
+
+
+ Attributes:
+ table_source_type:
+ The table source type.
+ view_spec:
+ Table view specification. This field should only be populated
+ if table\_source\_type is BIGQUERY\_VIEW.
+ """,
+ # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.BigQueryTableSpec)
+ ),
+)
+_sym_db.RegisterMessage(BigQueryTableSpec)
+
+ViewSpec = _reflection.GeneratedProtocolMessageType(
+ "ViewSpec",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_VIEWSPEC,
+ __module__="google.cloud.datacatalog_v1beta1.proto.table_spec_pb2",
+ __doc__="""Table view specification.
+
+
+ Attributes:
+ view_query:
+ The query that defines the table view.
+ """,
+ # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.ViewSpec)
+ ),
+)
+_sym_db.RegisterMessage(ViewSpec)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2_grpc.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2_grpc.py
new file mode 100644
index 000000000000..07cb78fe03a9
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2_grpc.py
@@ -0,0 +1,2 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py
new file mode 100644
index 000000000000..0643b30d14dd
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py
@@ -0,0 +1,147 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/datacatalog_v1beta1/proto/timestamps.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name="google/cloud/datacatalog_v1beta1/proto/timestamps.proto",
+ package="google.cloud.datacatalog.v1beta1",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001"
+ ),
+ serialized_pb=_b(
+ '\n7google/cloud/datacatalog_v1beta1/proto/timestamps.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/protobuf/timestamp.proto"\xa5\x01\n\x10SystemTimestamps\x12/\n\x0b\x63reate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3'
+ ),
+ dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR],
+)
+
+
+_SYSTEMTIMESTAMPS = _descriptor.Descriptor(
+ name="SystemTimestamps",
+ full_name="google.cloud.datacatalog.v1beta1.SystemTimestamps",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="create_time",
+ full_name="google.cloud.datacatalog.v1beta1.SystemTimestamps.create_time",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="update_time",
+ full_name="google.cloud.datacatalog.v1beta1.SystemTimestamps.update_time",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="expire_time",
+ full_name="google.cloud.datacatalog.v1beta1.SystemTimestamps.expire_time",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=127,
+ serialized_end=292,
+)
+
+_SYSTEMTIMESTAMPS.fields_by_name[
+ "create_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_SYSTEMTIMESTAMPS.fields_by_name[
+ "update_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_SYSTEMTIMESTAMPS.fields_by_name[
+ "expire_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+DESCRIPTOR.message_types_by_name["SystemTimestamps"] = _SYSTEMTIMESTAMPS
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+SystemTimestamps = _reflection.GeneratedProtocolMessageType(
+ "SystemTimestamps",
+ (_message.Message,),
+ dict(
+ DESCRIPTOR=_SYSTEMTIMESTAMPS,
+ __module__="google.cloud.datacatalog_v1beta1.proto.timestamps_pb2",
+ __doc__="""Timestamps about this resource according to a particular system.
+
+
+ Attributes:
+ create_time:
+ Output only. The creation time of the resource within the
+ given system.
+ update_time:
+ Output only. The last-modified time of the resource within the
+ given system.
+ expire_time:
+ Output only. The expiration time of the resource within the
+ given system.
+ """,
+ # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.SystemTimestamps)
+ ),
+)
+_sym_db.RegisterMessage(SystemTimestamps)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2_grpc.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2_grpc.py
new file mode 100644
index 000000000000..07cb78fe03a9
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2_grpc.py
@@ -0,0 +1,2 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/types.py b/datacatalog/google/cloud/datacatalog_v1beta1/types.py
new file mode 100644
index 000000000000..1338be8895d3
--- /dev/null
+++ b/datacatalog/google/cloud/datacatalog_v1beta1/types.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+import sys
+
+from google.api_core.protobuf_helpers import get_messages
+
+from google.cloud.datacatalog_v1beta1.proto import datacatalog_pb2
+from google.cloud.datacatalog_v1beta1.proto import schema_pb2
+from google.cloud.datacatalog_v1beta1.proto import table_spec_pb2
+from google.cloud.datacatalog_v1beta1.proto import timestamps_pb2
+from google.protobuf import timestamp_pb2
+
+_shared_modules = [timestamp_pb2]
+
+_local_modules = [datacatalog_pb2, schema_pb2, table_spec_pb2, timestamps_pb2]
+
+names = []
+
+for module in _shared_modules:
+ for name, message in get_messages(module).items():
+ setattr(sys.modules[__name__], name, message)
+ names.append(name)
+for module in _local_modules:
+ for name, message in get_messages(module).items():
+ message.__module__ = "google.cloud.datacatalog_v1beta1.types"
+ setattr(sys.modules[__name__], name, message)
+ names.append(name)
+
+__all__ = tuple(sorted(names))
diff --git a/datacatalog/noxfile.py b/datacatalog/noxfile.py
new file mode 100644
index 000000000000..9e3b4be9fa0f
--- /dev/null
+++ b/datacatalog/noxfile.py
@@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+import os
+
+import nox
+
+
+LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core"))
+
+@nox.session(python="3.7")
+def lint(session):
+ """Run linters.
+
+ Returns a failure if the linters find linting errors or sufficiently
+ serious code quality issues.
+ """
+ session.install("flake8", "black", *LOCAL_DEPS)
+ session.run(
+ "black",
+ "--check",
+ "google",
+ "tests",
+ "docs",
+ )
+ session.run("flake8", "google", "tests")
+
+
+@nox.session(python="3.6")
+def blacken(session):
+ """Run black.
+
+ Format code to uniform standard.
+
+ This currently uses Python 3.6 due to the automated Kokoro run of synthtool.
+ That run uses an image that doesn't have 3.7 installed. Before updating this
+ check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
+ """
+ session.install("black")
+ session.run(
+ "black",
+ "google",
+ "tests",
+ "docs",
+ )
+
+
+@nox.session(python="3.7")
+def lint_setup_py(session):
+ """Verify that setup.py is valid (including RST check)."""
+ session.install("docutils", "pygments")
+ session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
+
+
+def default(session):
+ # Install all test dependencies, then install this package in-place.
+ session.install("mock", "pytest", "pytest-cov")
+ for local_dep in LOCAL_DEPS:
+ session.install("-e", local_dep)
+ session.install("-e", ".")
+
+ # Run py.test against the unit tests.
+ session.run(
+ "py.test",
+ "--quiet",
+ "--cov=google.cloud",
+ "--cov=tests.unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=80",
+ os.path.join("tests", "unit"),
+ *session.posargs,
+ )
+
+
+@nox.session(python=["2.7", "3.5", "3.6", "3.7"])
+def unit(session):
+ """Run the unit test suite."""
+ default(session)
+
+
+@nox.session(python=["2.7", "3.7"])
+def system(session):
+ """Run the system test suite."""
+ system_test_path = os.path.join("tests", "system.py")
+ system_test_folder_path = os.path.join("tests", "system")
+ # Sanity check: Only run tests if the environment variable is set.
+ if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
+ session.skip("Credentials must be set via environment variable")
+
+ system_test_exists = os.path.exists(system_test_path)
+ system_test_folder_exists = os.path.exists(system_test_folder_path)
+ # Sanity check: only run tests if found.
+ if not system_test_exists and not system_test_folder_exists:
+ session.skip("System tests were not found")
+
+ # Use pre-release gRPC for system tests.
+ session.install("--pre", "grpcio")
+
+ # Install all test dependencies, then install this package into the
+ # virtualenv's dist-packages.
+ session.install("mock", "pytest")
+ for local_dep in LOCAL_DEPS:
+ session.install("-e", local_dep)
+ session.install("-e", "../test_utils/")
+ session.install("-e", ".")
+
+ # Run py.test against the system tests.
+ if system_test_exists:
+ session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ if system_test_folder_exists:
+ session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+
+
+@nox.session(python="3.7")
+def cover(session):
+ """Run the final coverage report.
+
+ Aggregates unit-run coverage (not system runs), then erases data.
+ NOTE(review): --fail-under=80 here, but .coveragerc sets fail_under = 100
+ """
+ session.install("coverage", "pytest-cov")
+ session.run("coverage", "report", "--show-missing", "--fail-under=80")
+
+ session.run("coverage", "erase")
diff --git a/datacatalog/setup.cfg b/datacatalog/setup.cfg
new file mode 100644
index 000000000000..2a9acf13daa9
--- /dev/null
+++ b/datacatalog/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal = 1
diff --git a/datacatalog/setup.py b/datacatalog/setup.py
new file mode 100644
index 000000000000..0a40a2e88534
--- /dev/null
+++ b/datacatalog/setup.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import io
+import os
+
+import setuptools
+
+name = 'google-cloud-datacatalog'
+# Fix duplicated word: "Data Catalog API API" -> "Data Catalog API".
+description = 'Google Cloud Data Catalog API client library'
+version = '0.1.0'
+# Should be one of:
+# 'Development Status :: 3 - Alpha'
+# 'Development Status :: 4 - Beta'
+# 'Development Status :: 5 - Production/Stable'
+release_status = 'Development Status :: 3 - Alpha'
+dependencies = [
+ 'google-api-core[grpc] >= 1.4.1, < 2.0.0dev',
+ 'enum34; python_version < "3.4"',
+]
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+readme_filename = os.path.join(package_root, 'README.rst')
+with io.open(readme_filename, encoding='utf-8') as readme_file:
+ readme = readme_file.read()
+
+packages = [
+ package for package in setuptools.find_packages()
+ if package.startswith('google')
+]
+
+namespaces = ['google']
+if 'google.cloud' in packages:
+ namespaces.append('google.cloud')
+
+setuptools.setup(
+ name=name,
+ version=version,
+ description=description,
+ long_description=readme,
+ author='Google LLC',
+ author_email='googleapis-packages@google.com',
+ license='Apache 2.0',
+ url='https://github.com/googleapis/google-cloud-python',
+ classifiers=[
+ release_status,
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Operating System :: OS Independent',
+ 'Topic :: Internet',
+ ],
+ platforms='Posix; MacOS X; Windows',
+ packages=packages,
+ namespace_packages=namespaces,
+ install_requires=dependencies,
+ include_package_data=True,
+ zip_safe=False,
+)
diff --git a/datacatalog/synth.metadata b/datacatalog/synth.metadata
new file mode 100644
index 000000000000..7c51bc38137e
--- /dev/null
+++ b/datacatalog/synth.metadata
@@ -0,0 +1,31 @@
+{
+ "updateTime": "2019-04-15T18:20:24.527055Z",
+ "sources": [
+ {
+ "generator": {
+ "name": "artman",
+ "version": "0.16.25",
+ "dockerImage": "googleapis/artman@sha256:d9597f983d1d4e61272c63cb97b7d8f8234da9999526c35d357de3d781f0ec1b"
+ }
+ },
+ {
+ "template": {
+ "name": "python_library",
+ "origin": "synthtool.gcp",
+ "version": "2019.4.10"
+ }
+ }
+ ],
+ "destinations": [
+ {
+ "client": {
+ "source": "googleapis",
+ "apiName": "datacatalog",
+ "apiVersion": "v1beta1",
+ "language": "python",
+ "generator": "gapic",
+ "config": "google/cloud/datacatalog/artman_datacatalog_v1beta1.yaml"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/datacatalog/synth.py b/datacatalog/synth.py
new file mode 100644
index 000000000000..0b81f3f62c93
--- /dev/null
+++ b/datacatalog/synth.py
@@ -0,0 +1,53 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script is used to synthesize generated parts of this library."""
+
+import synthtool as s
+from synthtool import gcp
+
+gapic = gcp.GAPICGenerator()  # wraps the artman/GAPIC code generator
+common = gcp.CommonTemplates()  # source of shared, templated repo files
+version = 'v1beta1'
+
+# ----------------------------------------------------------------------------
+# Generate datacatalog GAPIC layer
+# ----------------------------------------------------------------------------
+library = gapic.py_library(
+    'datacatalog',
+    version,
+    config_path='/google/cloud/datacatalog/artman_datacatalog_v1beta1.yaml',
+    artman_output_name='datacatalog-v1beta1',
+)
+
+s.move(
+    library,
+    excludes=[  # hand-maintained files; do not let the generator overwrite them
+        'docs/conf.py',
+        'docs/index.rst',
+        'google/cloud/datacatalog_v1beta1/__init__.py',
+        'README.rst',
+        'nox*.py',
+        'setup.py',
+        'setup.cfg',
+    ],
+)
+
+# ----------------------------------------------------------------------------
+# Add templated files
+# ----------------------------------------------------------------------------
+templated_files = common.py_library(unit_cov_level=80, cov_level=80)  # coverage gates for nox sessions
+s.move(templated_files)
+
+s.shell.run(["nox", "-s", "blacken"], hide_output=False)  # reformat all generated code with black
diff --git a/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py b/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py
new file mode 100644
index 000000000000..b3d795ccb841
--- /dev/null
+++ b/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Unit tests."""
+
+import mock
+import pytest
+
+from google.cloud import datacatalog_v1beta1
+from google.cloud.datacatalog_v1beta1.proto import datacatalog_pb2
+
+
+class MultiCallableStub(object):
+    """Stub for the grpc.UnaryUnaryMultiCallable interface."""
+
+    def __init__(self, method, channel_stub):
+        self.method = method  # fully-qualified RPC method name being stubbed
+        self.channel_stub = channel_stub  # parent ChannelStub holding requests/responses
+
+    def __call__(self, request, timeout=None, metadata=None, credentials=None):
+        self.channel_stub.requests.append((self.method, request))  # record call for test assertions
+
+        response = None
+        if self.channel_stub.responses:
+            response = self.channel_stub.responses.pop()  # LIFO: last queued response served first
+
+        if isinstance(response, Exception):
+            raise response
+
+        if response is not None:  # identity check: a falsy-but-valid (empty) message must still be returned
+            return response
+
+
+class ChannelStub(object):
+    """Stub for the grpc.Channel interface."""
+
+    def __init__(self, responses=None):  # None sentinel: a mutable default ([]) is shared across instances
+        self.responses = [] if responses is None else responses  # queued responses, popped per RPC call
+        self.requests = []  # (method, request) tuples recorded by MultiCallableStub
+
+    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
+        return MultiCallableStub(method, self)  # serializers are ignored by the stub
+
+
+class CustomException(Exception):
+    """Stand-in error queued on the channel stub to simulate an RPC failure."""
+
+
+class TestDataCatalogClient(object):
+    def test_lookup_entry(self):
+        # Setup Expected Response
+        name = "name3373707"
+        linked_resource = "linkedResource1544625012"
+        display_name = "displayName1615086568"
+        description = "description-1724546052"
+        expected_response = {
+            "name": name,
+            "linked_resource": linked_resource,
+            "display_name": display_name,
+            "description": description,
+        }
+        expected_response = datacatalog_pb2.Entry(**expected_response)  # dict -> proto message
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+        with patch as create_channel:
+            create_channel.return_value = channel  # client transport now talks to the stub channel
+            client = datacatalog_v1beta1.DataCatalogClient()
+
+        response = client.lookup_entry()  # no args: presumably sends an empty LookupEntryRequest — asserted below
+        assert expected_response == response
+
+        assert len(channel.requests) == 1  # exactly one RPC recorded by the stub
+        expected_request = datacatalog_pb2.LookupEntryRequest()
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_lookup_entry_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])  # queued exception is raised by the stub
+        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+        with patch as create_channel:
+            create_channel.return_value = channel
+            client = datacatalog_v1beta1.DataCatalogClient()
+
+        with pytest.raises(CustomException):  # client surfaces the channel error to the caller
+            client.lookup_entry()
diff --git a/docs/datacatalog b/docs/datacatalog
new file mode 120000
index 000000000000..41b79354a771
--- /dev/null
+++ b/docs/datacatalog
@@ -0,0 +1 @@
+../datacatalog/docs
\ No newline at end of file
diff --git a/docs/index.rst b/docs/index.rst
index beb5be0990a5..3f38923b9946 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -10,6 +10,7 @@
BigQuery Storage
Bigtable
Container
+ Data Catalog
Data Labeling
Data Loss Prevention
Dataproc
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 99b40f458c77..04ebe6495fba 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -14,6 +14,7 @@ grpcio-gcp >= 0.2.2
-e bigquery_storage/
-e bigtable/
-e container/
+-e datacatalog/
-e datalabeling/
-e dataproc/
-e datastore/